diff --git a/.github/weekly-digest.yml b/.github/weekly-digest.yml deleted file mode 100644 index fe502fbc98..0000000000 --- a/.github/weekly-digest.yml +++ /dev/null @@ -1,7 +0,0 @@ -# Configuration for weekly-digest - https://github.com/apps/weekly-digest -publishDay: sun -canPublishIssues: true -canPublishPullRequests: true -canPublishContributors: true -canPublishStargazers: true -canPublishCommits: true diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml new file mode 100644 index 0000000000..bc08868dc1 --- /dev/null +++ b/.github/workflows/documentation.yml @@ -0,0 +1,63 @@ +name: documentation + +on: + pull_request: + branches: [develop] + types: [review_requested, ready_for_review] + paths: + - 'website/**' + push: + branches: [main] + paths: + - 'website/**' + +jobs: + check-build: + if: github.event_name != 'push' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-node@v1 + with: + node-version: '12.x' + - name: Test Build + run: | + cd website + if [ -e yarn.lock ]; then + yarn install --frozen-lockfile + elif [ -e package-lock.json ]; then + npm ci + else + npm i + fi + npm run build + deploy-website: + if: github.event_name != 'pull_request' + runs-on: ubuntu-latest + steps: + - name: 🚚 Get latest code + uses: actions/checkout@v2 + + - uses: actions/setup-node@v1 + with: + node-version: '12.x' + + - name: 🔨 Build + run: | + cd website + if [ -e yarn.lock ]; then + yarn install --frozen-lockfile + elif [ -e package-lock.json ]; then + npm ci + else + npm i + fi + npm run build + + - name: 📂 Sync files + uses: SamKirkland/FTP-Deploy-Action@4.0.0 + with: + server: ftp.openpype.io + username: ${{ secrets.ftp_user }} + password: ${{ secrets.ftp_password }} + local-dir: ./website/build/ \ No newline at end of file diff --git a/.github/workflows/test_build.yml b/.github/workflows/test_build.yml new file mode 100644 index 0000000000..6e1e38d0b2 --- /dev/null +++ b/.github/workflows/test_build.yml @@ -0,0 +1,88 @@ +# This workflow test-builds OpenPype on Windows and Ubuntu when a pull request is marked ready for review +# Documentation, website and vendor changes are ignored (see paths-ignore below) + +name: Test Build + +on: + pull_request: + branches: [develop] + types: [review_requested, ready_for_review] + paths-ignore: + - 'docs/**' + - 'website/**' + - 'vendor/**' + +jobs: + Windows-latest: + + runs-on: windows-latest + strategy: + matrix: + python-version: [3.7] + + steps: + - name: 🚛 Checkout Code + uses: actions/checkout@v2 + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: 🧡 Install Requirements + shell: pwsh + run: | + ./tools/create_env.ps1 + + - name: 🔨 Build + shell: pwsh + run: | + ./tools/build.ps1 + + Ubuntu-latest: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.7] + + steps: + - name: 🚛 Checkout Code + uses: actions/checkout@v2 + + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: 🧡 Install Requirements + run: | + ./tools/create_env.sh + + - name: 🔨 Build + run: | + ./tools/build.sh + + # MacOS-latest: + + # runs-on: macos-latest + # strategy: + # matrix: + # python-version: [3.7] + + # steps: + # - name: 🚛 Checkout Code + # uses: actions/checkout@v2 + + # - name: Set up Python + # uses: actions/setup-python@v2 + # with: + # python-version: ${{ matrix.python-version }}
+ + # - name: 🧡 Install Requirements + # run: | + # ./tools/create_env.sh + + # - name: 🔨 Build + # run: | + # ./tools/build.sh \ No newline at end of file diff --git a/.gitignore b/.gitignore index bc20e13873..ebb47e55d2 100644 --- a/.gitignore +++ b/.gitignore @@ -64,10 +64,9 @@ coverage.xml .hypothesis/ .pytest_cache/ - # Node JS packages ################## -node_modules/ +node_modules package-lock.json openpype/premiere/ppro/js/debug.log @@ -81,4 +80,15 @@ openpype/premiere/ppro/js/debug.log .vscode/ .env dump.sql -test_localsystem.txt \ No newline at end of file +test_localsystem.txt + +# website +########## +website/translated_docs +website/build/ +website/node_modules +website/i18n/* + +website/debug.log + +website/.docusaurus \ No newline at end of file diff --git a/.gitmodules b/.gitmodules index d286419ee5..7e6b7cb861 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,13 +1,13 @@ [submodule "repos/avalon-core"] path = repos/avalon-core - url = git@github.com:pypeclub/avalon-core.git + url = https://github.com/pypeclub/avalon-core.git branch = develop [submodule "repos/avalon-unreal-integration"] path = repos/avalon-unreal-integration - url = git@github.com:pypeclub/avalon-unreal-integration.git + url = https://github.com/pypeclub/avalon-unreal-integration.git [submodule "openpype/modules/ftrack/python2_vendor/ftrack-python-api"] path = openpype/modules/ftrack/python2_vendor/ftrack-python-api url = https://bitbucket.org/ftrack/ftrack-python-api.git [submodule "openpype/modules/ftrack/python2_vendor/arrow"] path = openpype/modules/ftrack/python2_vendor/arrow - url = git@github.com:arrow-py/arrow.git \ No newline at end of file + url = https://github.com/arrow-py/arrow.git \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 8d7ba1a905..25c055a0aa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,15 +2,27 @@ ## 3.0.0 - Unreleased -**Major Features:** ### Configuration - Studio Settings GUI: no more json configuration files -- Local Settings GUI - OpenPype Modules can be turned on and off -- Easy to add Applications +- Easy to add Application versions - Per Project Environment and plugin management - Robust profile system for creating reviewables and burnins, with filtering based on Application, Task and data family -Configurable publish plugins. Options to make any validator or extractor, optional or disabled. +- Configurable publish plugins. +- Options to make any validator or extractor optional or disabled. +- Color Management is now unified under anatomy settings. +- Subset naming and grouping +- Too many individual configurable options to list in this changelog :) +- All project attributes can now be set directly in OpenPype settings. +- Studio Settings can be locked to prevent unwanted artist changes. + + +### Local Settings +- Local Settings GUI where users can change certain options on an individual basis + - Application executables + - Project roots + - Project site sync settings + ### Build, Installation and Deployments - No requirements on artist machine - Fully distributed workflow possible @@ -20,10 +32,73 @@ Configurable publish plugins. Options to make any validator or extractor, option - Easy Build system - Safe versioning system with staging and production options -### Site Sync +### Misc +- System and diagnostic info tool in the tray +- Launching application from Launcher indicates activity +- All project roots are now named. Single root projects are now achieved by having a single named root in the project anatomy.
+- Basic support for task types, on top of task names. +- Timers now change automatically when the context is switched inside a running application +- "Master" versions have been renamed to "Hero" -to be continued... +### Ftrack +- Actions have customisable roles. +- Settings on all actions are updated live and don't require an OpenPype restart. +### Editorial +- Fully OTIO-based editorial publishing. +- Completely redone Hiero publishing to be a lot simpler and faster. +- Consistent conforming from Resolve, Hiero and Standalone Publisher + +### Backend +- OpenPype and Avalon now always share the same database (in 2.x it was possible to split them) +- Major codebase refactoring to allow for better CI, versioning and control of individual integrations +- OTIO is bundled with build +- OIIO is bundled with build +- FFMPEG is bundled with build +- Rest API and host WebSocket servers have been unified into a single local webserver +- Maya look assigner has been integrated into the main codebase +- Publish GUI has been integrated into the main codebase +- Studio and Project settings overrides are now stored in Mongo +### Site Sync (beta) +- Synchronisation of published files between workstations and central storage. +- Ability to add arbitrary storage providers to the Site Sync system. +- Default setup includes Disk and Google Drive providers as examples. +- Access to availability information from Loader and Scene Manager. +- Download / Upload queue with filtering, error and status reporting. +- Site sync can be configured on a per-project basis. +- Bulk upload and download from the loader. + + +## [2.16.1](https://github.com/pypeclub/pype/tree/2.16.1) (2021-04-13) + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.16.0...2.16.1) + +**Enhancements:** + +- Nuke: comp renders mix up [\#1301](https://github.com/pypeclub/pype/pull/1301) +- Validate project settings [\#1297](https://github.com/pypeclub/pype/pull/1297) +- After Effects: added SubsetManager [\#1234](https://github.com/pypeclub/pype/pull/1234) + +**Fixed bugs:** + +- Ftrack custom attributes in bulks [\#1312](https://github.com/pypeclub/pype/pull/1312) +- Ftrack optional pypclub role [\#1303](https://github.com/pypeclub/pype/pull/1303) +- AE remove orphaned instance from workfile - fix self.stub [\#1282](https://github.com/pypeclub/pype/pull/1282) +- Avalon schema names [\#1242](https://github.com/pypeclub/pype/pull/1242) +- Handle duplication of Task name [\#1226](https://github.com/pypeclub/pype/pull/1226) +- Modified path of plugin loads for Harmony and TVPaint [\#1217](https://github.com/pypeclub/pype/pull/1217) +- Regex checks in profiles filtering [\#1214](https://github.com/pypeclub/pype/pull/1214) +- Bulk mov strict task [\#1204](https://github.com/pypeclub/pype/pull/1204) +- Update custom ftrack session attributes [\#1202](https://github.com/pypeclub/pype/pull/1202) +- Nuke: write node colorspace ignore `default\(\)` label [\#1199](https://github.com/pypeclub/pype/pull/1199) +- Nuke: reverse search to make it more versatile [\#1178](https://github.com/pypeclub/pype/pull/1178) + +**Merged pull requests:** + +- Forward compatible ftrack group [\#1243](https://github.com/pypeclub/pype/pull/1243) +- Error message in pyblish UI [\#1206](https://github.com/pypeclub/pype/pull/1206) +- Nuke: deadline submission with search replaced env values from preset [\#1194](https://github.com/pypeclub/pype/pull/1194) ## [2.16.0](https://github.com/pypeclub/pype/tree/2.16.0) (2021-03-22) @@ -241,19 +316,19 @@ to be continued...
- Maya: Vray handling of default aov [\#748](https://github.com/pypeclub/pype/pull/748) - Maya: multiple renderable cameras in layer didn't work [\#744](https://github.com/pypeclub/pype/pull/744) - Ftrack integrate custom attributes fix [\#742](https://github.com/pypeclub/pype/pull/742) -## [2.14.0](https://github.com/pypeclub/pype/tree/2.14.0) (2020-11-24) + +## [2.14.0](https://github.com/pypeclub/pype/tree/2.14.0) (2020-11-23) [Full Changelog](https://github.com/pypeclub/pype/compare/2.13.7...2.14.0) **Enhancements:** +- Render publish plugins abstraction [\#687](https://github.com/pypeclub/pype/pull/687) - Shot asset build trigger status [\#736](https://github.com/pypeclub/pype/pull/736) - Maya: add camera rig publishing option [\#721](https://github.com/pypeclub/pype/pull/721) - Sort instances by label in pyblish gui [\#719](https://github.com/pypeclub/pype/pull/719) - Synchronize ftrack hierarchical and shot attributes [\#716](https://github.com/pypeclub/pype/pull/716) - 686 standalonepublisher editorial from image sequences [\#699](https://github.com/pypeclub/pype/pull/699) -- TV Paint: initial implementation of creators and local rendering [\#693](https://github.com/pypeclub/pype/pull/693) -- Render publish plugins abstraction [\#687](https://github.com/pypeclub/pype/pull/687) - Ask user to select non-default camera from scene or create a new. [\#678](https://github.com/pypeclub/pype/pull/678) - TVPaint: image loader with options [\#675](https://github.com/pypeclub/pype/pull/675) - Maya: Camera name can be added to burnins. [\#674](https://github.com/pypeclub/pype/pull/674) @@ -262,25 +337,33 @@ to be continued... **Fixed bugs:** +- Bugfix Hiero Review / Plate representation publish [\#743](https://github.com/pypeclub/pype/pull/743) +- Asset fetch second fix [\#726](https://github.com/pypeclub/pype/pull/726) - TVPaint extract review fix [\#740](https://github.com/pypeclub/pype/pull/740) - After Effects: Review were not being sent to ftrack [\#738](https://github.com/pypeclub/pype/pull/738) -- Asset fetch second fix [\#726](https://github.com/pypeclub/pype/pull/726) - Maya: vray proxy was not loading [\#722](https://github.com/pypeclub/pype/pull/722) - Maya: Vray expected file fixes [\#682](https://github.com/pypeclub/pype/pull/682) +- Missing audio on farm submission. 
[\#639](https://github.com/pypeclub/pype/pull/639) **Deprecated:** - Removed artist view from pyblish gui [\#717](https://github.com/pypeclub/pype/pull/717) - Maya: disable legacy override check for cameras [\#715](https://github.com/pypeclub/pype/pull/715) +**Merged pull requests:** + +- Application manager [\#728](https://github.com/pypeclub/pype/pull/728) +- Feature \#664 3.0 lib refactor [\#706](https://github.com/pypeclub/pype/pull/706) +- Lib from illicit part 2 [\#700](https://github.com/pypeclub/pype/pull/700) +- 3.0 lib refactor - path tools [\#697](https://github.com/pypeclub/pype/pull/697) ## [2.13.7](https://github.com/pypeclub/pype/tree/2.13.7) (2020-11-19) [Full Changelog](https://github.com/pypeclub/pype/compare/2.13.6...2.13.7) -**Merged pull requests:** +**Fixed bugs:** -- fix\(SP\): getting fps from context instead of nonexistent entity [\#729](https://github.com/pypeclub/pype/pull/729) +- Standalone Publisher: getting fps from context instead of nonexistent entity [\#729](https://github.com/pypeclub/pype/pull/729) # Changelog diff --git a/HISTORY.md b/HISTORY.md index b8b96fb4c3..053059a9ea 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,3 +1,268 @@ +## [2.16.0](https://github.com/pypeclub/pype/tree/2.16.0) (2021-03-22) + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.3...2.16.0) + +**Enhancements:** + +- Nuke: deadline submit limit group filter [\#1167](https://github.com/pypeclub/pype/pull/1167) +- Maya: support for Deadline Group and Limit Groups - backport 2.x [\#1156](https://github.com/pypeclub/pype/pull/1156) +- Maya: fixes for Redshift support [\#1152](https://github.com/pypeclub/pype/pull/1152) +- Nuke: adding preset for a Read node name to all img and mov Loaders [\#1146](https://github.com/pypeclub/pype/pull/1146) +- nuke deadline submit with environ var from presets overrides [\#1142](https://github.com/pypeclub/pype/pull/1142) +- Change timers after task change [\#1138](https://github.com/pypeclub/pype/pull/1138) +- Nuke: shortcuts for Pype menu [\#1127](https://github.com/pypeclub/pype/pull/1127) +- Nuke: workfile template [\#1124](https://github.com/pypeclub/pype/pull/1124) +- Sites local settings by site name [\#1117](https://github.com/pypeclub/pype/pull/1117) +- Reset loader's asset selection on context change [\#1106](https://github.com/pypeclub/pype/pull/1106) +- Bulk mov render publishing [\#1101](https://github.com/pypeclub/pype/pull/1101) +- Photoshop: mark publishable instances [\#1093](https://github.com/pypeclub/pype/pull/1093) +- Added ability to define BG color for extract review [\#1088](https://github.com/pypeclub/pype/pull/1088) +- TVPaint extractor enhancement [\#1080](https://github.com/pypeclub/pype/pull/1080) +- Photoshop: added support for .psb in workfiles [\#1078](https://github.com/pypeclub/pype/pull/1078) +- Optionally add task to subset name [\#1072](https://github.com/pypeclub/pype/pull/1072) +- Only extend clip range when collecting. [\#1008](https://github.com/pypeclub/pype/pull/1008) +- Collect audio for farm reviews. 
[\#1073](https://github.com/pypeclub/pype/pull/1073) + + +**Fixed bugs:** + +- Fix path spaces in jpeg extractor [\#1174](https://github.com/pypeclub/pype/pull/1174) +- Maya: Bugfix: superclass for CreateCameraRig [\#1166](https://github.com/pypeclub/pype/pull/1166) +- Maya: Submit to Deadline - fix typo in condition [\#1163](https://github.com/pypeclub/pype/pull/1163) +- Avoid dot in repre extension [\#1125](https://github.com/pypeclub/pype/pull/1125) +- Fix versions variable usage in standalone publisher [\#1090](https://github.com/pypeclub/pype/pull/1090) +- Collect instance data fix subset query [\#1082](https://github.com/pypeclub/pype/pull/1082) +- Fix getting the camera name. [\#1067](https://github.com/pypeclub/pype/pull/1067) +- Nuke: Ensure "NUKE\_TEMP\_DIR" is not part of the Deadline job environment. [\#1064](https://github.com/pypeclub/pype/pull/1064) + +## [2.15.3](https://github.com/pypeclub/pype/tree/2.15.3) (2021-02-26) + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.2...2.15.3) + +**Enhancements:** + +- Maya: speedup renderable camera collection [\#1053](https://github.com/pypeclub/pype/pull/1053) +- Harmony - add regex search to filter allowed task names for collectin… [\#1047](https://github.com/pypeclub/pype/pull/1047) + +**Fixed bugs:** + +- Ftrack integrate hierarchy fix [\#1085](https://github.com/pypeclub/pype/pull/1085) +- Explicit subset filter in anatomy instance data [\#1059](https://github.com/pypeclub/pype/pull/1059) +- TVPaint frame offset [\#1057](https://github.com/pypeclub/pype/pull/1057) +- Auto fix unicode strings [\#1046](https://github.com/pypeclub/pype/pull/1046) + +## [2.15.2](https://github.com/pypeclub/pype/tree/2.15.2) (2021-02-19) + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.1...2.15.2) + +**Enhancements:** + +- Maya: Vray scene publishing [\#1013](https://github.com/pypeclub/pype/pull/1013) + +**Fixed bugs:** + +- Fix entity move under project [\#1040](https://github.com/pypeclub/pype/pull/1040) +- smaller nuke fixes from production [\#1036](https://github.com/pypeclub/pype/pull/1036) +- TVPaint thumbnail extract fix [\#1031](https://github.com/pypeclub/pype/pull/1031) + +## [2.15.1](https://github.com/pypeclub/pype/tree/2.15.1) (2021-02-12) + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.0...2.15.1) + +**Enhancements:** + +- Delete version as loader action [\#1011](https://github.com/pypeclub/pype/pull/1011) +- Delete old versions [\#445](https://github.com/pypeclub/pype/pull/445) + +**Fixed bugs:** + +- PS - remove obsolete functions from pywin32 [\#1006](https://github.com/pypeclub/pype/pull/1006) +- Clone description of review session objects. [\#922](https://github.com/pypeclub/pype/pull/922) + +## [2.15.0](https://github.com/pypeclub/pype/tree/2.15.0) (2021-02-09) + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.6...2.15.0) + +**Enhancements:** + +- Resolve - loading and updating clips [\#932](https://github.com/pypeclub/pype/pull/932) +- Release/2.15.0 [\#926](https://github.com/pypeclub/pype/pull/926) +- Photoshop: add option for template.psd and prelaunch hook [\#894](https://github.com/pypeclub/pype/pull/894) +- Nuke: deadline presets [\#993](https://github.com/pypeclub/pype/pull/993) +- Maya: Alembic only set attributes that exists. 
[\#986](https://github.com/pypeclub/pype/pull/986) +- Harmony: render local and handle fixes [\#981](https://github.com/pypeclub/pype/pull/981) +- PSD Bulk export of ANIM group [\#965](https://github.com/pypeclub/pype/pull/965) +- AE - added prelaunch hook for opening last or workfile from template [\#944](https://github.com/pypeclub/pype/pull/944) +- PS - safer handling of loading of workfile [\#941](https://github.com/pypeclub/pype/pull/941) +- Maya: Handling Arnold referenced AOVs [\#938](https://github.com/pypeclub/pype/pull/938) +- TVPaint: switch layer IDs for layer names during identification [\#903](https://github.com/pypeclub/pype/pull/903) +- TVPaint audio/sound loader [\#893](https://github.com/pypeclub/pype/pull/893) +- Clone review session with children. [\#891](https://github.com/pypeclub/pype/pull/891) +- Simple compositing data packager for freelancers [\#884](https://github.com/pypeclub/pype/pull/884) +- Harmony deadline submission [\#881](https://github.com/pypeclub/pype/pull/881) +- Maya: Optionally hide image planes from reviews. [\#840](https://github.com/pypeclub/pype/pull/840) +- Maya: handle referenced AOVs for Vray [\#824](https://github.com/pypeclub/pype/pull/824) +- DWAA/DWAB support on windows [\#795](https://github.com/pypeclub/pype/pull/795) +- Unreal: animation, layout and setdress updates [\#695](https://github.com/pypeclub/pype/pull/695) + +**Fixed bugs:** + +- Maya: Looks - disable hardlinks [\#995](https://github.com/pypeclub/pype/pull/995) +- Fix Ftrack custom attribute update [\#982](https://github.com/pypeclub/pype/pull/982) +- Prores ks in burnin script [\#960](https://github.com/pypeclub/pype/pull/960) +- terminal.py crash on import [\#839](https://github.com/pypeclub/pype/pull/839) +- Extract review handle bizarre pixel aspect ratio [\#990](https://github.com/pypeclub/pype/pull/990) +- Nuke: add nuke related env var to sumbission [\#988](https://github.com/pypeclub/pype/pull/988) +- Nuke: missing preset's variable [\#984](https://github.com/pypeclub/pype/pull/984) +- Get creator by name fix [\#979](https://github.com/pypeclub/pype/pull/979) +- Fix update of project's tasks on Ftrack sync [\#972](https://github.com/pypeclub/pype/pull/972) +- nuke: wrong frame offset in mov loader [\#971](https://github.com/pypeclub/pype/pull/971) +- Create project structure action fix multiroot [\#967](https://github.com/pypeclub/pype/pull/967) +- PS: remove pywin installation from hook [\#964](https://github.com/pypeclub/pype/pull/964) +- Prores ks in burnin script [\#959](https://github.com/pypeclub/pype/pull/959) +- Subset family is now stored in subset document [\#956](https://github.com/pypeclub/pype/pull/956) +- DJV new version arguments [\#954](https://github.com/pypeclub/pype/pull/954) +- TV Paint: Fix single frame Sequence [\#953](https://github.com/pypeclub/pype/pull/953) +- nuke: missing `file` knob update [\#933](https://github.com/pypeclub/pype/pull/933) +- Photoshop: Create from single layer was failing [\#920](https://github.com/pypeclub/pype/pull/920) +- Nuke: baking mov with correct colorspace inherited from write [\#909](https://github.com/pypeclub/pype/pull/909) +- Launcher fix actions discover [\#896](https://github.com/pypeclub/pype/pull/896) +- Get the correct file path for the updated mov. 
[\#889](https://github.com/pypeclub/pype/pull/889) +- Maya: Deadline submitter - shared data access violation [\#831](https://github.com/pypeclub/pype/pull/831) +- Maya: Take into account vray master AOV switch [\#822](https://github.com/pypeclub/pype/pull/822) + +**Merged pull requests:** + +- Refactor blender to 3.0 format [\#934](https://github.com/pypeclub/pype/pull/934) + +## [2.14.6](https://github.com/pypeclub/pype/tree/2.14.6) (2021-01-15) + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.5...2.14.6) + +**Fixed bugs:** + +- Nuke: improving of hashing path [\#885](https://github.com/pypeclub/pype/pull/885) + +**Merged pull requests:** + +- Hiero: cut videos with correct secons [\#892](https://github.com/pypeclub/pype/pull/892) +- Faster sync to avalon preparation [\#869](https://github.com/pypeclub/pype/pull/869) + +## [2.14.5](https://github.com/pypeclub/pype/tree/2.14.5) (2021-01-06) + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.4...2.14.5) + +**Merged pull requests:** + +- Pype logger refactor [\#866](https://github.com/pypeclub/pype/pull/866) + +## [2.14.4](https://github.com/pypeclub/pype/tree/2.14.4) (2020-12-18) + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.3...2.14.4) + +**Merged pull requests:** + +- Fix - AE - added explicit cast to int [\#837](https://github.com/pypeclub/pype/pull/837) + +## [2.14.3](https://github.com/pypeclub/pype/tree/2.14.3) (2020-12-16) + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.2...2.14.3) + +**Fixed bugs:** + +- TVPaint repair invalid metadata [\#809](https://github.com/pypeclub/pype/pull/809) +- Feature/push hier value to nonhier action [\#807](https://github.com/pypeclub/pype/pull/807) +- Harmony: fix palette and image sequence loader [\#806](https://github.com/pypeclub/pype/pull/806) + +**Merged pull requests:** + +- respecting space in path [\#823](https://github.com/pypeclub/pype/pull/823) + +## [2.14.2](https://github.com/pypeclub/pype/tree/2.14.2) (2020-12-04) + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.1...2.14.2) + +**Enhancements:** + +- Collapsible wrapper in settings [\#767](https://github.com/pypeclub/pype/pull/767) + +**Fixed bugs:** + +- Harmony: template extraction and palettes thumbnails on mac [\#768](https://github.com/pypeclub/pype/pull/768) +- TVPaint store context to workfile metadata \(764\) [\#766](https://github.com/pypeclub/pype/pull/766) +- Extract review audio cut fix [\#763](https://github.com/pypeclub/pype/pull/763) + +**Merged pull requests:** + +- AE: fix publish after background load [\#781](https://github.com/pypeclub/pype/pull/781) +- TVPaint store members key [\#769](https://github.com/pypeclub/pype/pull/769) + +## [2.14.1](https://github.com/pypeclub/pype/tree/2.14.1) (2020-11-27) + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.0...2.14.1) + +**Enhancements:** + +- Settings required keys in modifiable dict [\#770](https://github.com/pypeclub/pype/pull/770) +- Extract review may not add audio to output [\#761](https://github.com/pypeclub/pype/pull/761) + +**Fixed bugs:** + +- After Effects: frame range, file format and render source scene fixes [\#760](https://github.com/pypeclub/pype/pull/760) +- Hiero: trimming review with clip event number [\#754](https://github.com/pypeclub/pype/pull/754) +- TVPaint: fix updating of loaded subsets [\#752](https://github.com/pypeclub/pype/pull/752) +- Maya: Vray handling of default aov [\#748](https://github.com/pypeclub/pype/pull/748) +- Maya: multiple 
renderable cameras in layer didn't work [\#744](https://github.com/pypeclub/pype/pull/744) +- Ftrack integrate custom attributes fix [\#742](https://github.com/pypeclub/pype/pull/742) + +## [2.14.0](https://github.com/pypeclub/pype/tree/2.14.0) (2020-11-23) + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.7...2.14.0) + +**Enhancements:** + +- Render publish plugins abstraction [\#687](https://github.com/pypeclub/pype/pull/687) +- Shot asset build trigger status [\#736](https://github.com/pypeclub/pype/pull/736) +- Maya: add camera rig publishing option [\#721](https://github.com/pypeclub/pype/pull/721) +- Sort instances by label in pyblish gui [\#719](https://github.com/pypeclub/pype/pull/719) +- Synchronize ftrack hierarchical and shot attributes [\#716](https://github.com/pypeclub/pype/pull/716) +- 686 standalonepublisher editorial from image sequences [\#699](https://github.com/pypeclub/pype/pull/699) +- Ask user to select non-default camera from scene or create a new. [\#678](https://github.com/pypeclub/pype/pull/678) +- TVPaint: image loader with options [\#675](https://github.com/pypeclub/pype/pull/675) +- Maya: Camera name can be added to burnins. [\#674](https://github.com/pypeclub/pype/pull/674) +- After Effects: base integration with loaders [\#667](https://github.com/pypeclub/pype/pull/667) +- Harmony: Javascript refactoring and overall stability improvements [\#666](https://github.com/pypeclub/pype/pull/666) + +**Fixed bugs:** + +- Bugfix Hiero Review / Plate representation publish [\#743](https://github.com/pypeclub/pype/pull/743) +- Asset fetch second fix [\#726](https://github.com/pypeclub/pype/pull/726) +- TVPaint extract review fix [\#740](https://github.com/pypeclub/pype/pull/740) +- After Effects: Review were not being sent to ftrack [\#738](https://github.com/pypeclub/pype/pull/738) +- Maya: vray proxy was not loading [\#722](https://github.com/pypeclub/pype/pull/722) +- Maya: Vray expected file fixes [\#682](https://github.com/pypeclub/pype/pull/682) +- Missing audio on farm submission. [\#639](https://github.com/pypeclub/pype/pull/639) + +**Deprecated:** + +- Removed artist view from pyblish gui [\#717](https://github.com/pypeclub/pype/pull/717) +- Maya: disable legacy override check for cameras [\#715](https://github.com/pypeclub/pype/pull/715) + +**Merged pull requests:** + +- Application manager [\#728](https://github.com/pypeclub/pype/pull/728) +- Feature \#664 3.0 lib refactor [\#706](https://github.com/pypeclub/pype/pull/706) +- Lib from illicit part 2 [\#700](https://github.com/pypeclub/pype/pull/700) +- 3.0 lib refactor - path tools [\#697](https://github.com/pypeclub/pype/pull/697) + +## [2.13.7](https://github.com/pypeclub/pype/tree/2.13.7) (2020-11-19) + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.6...2.13.7) + +**Fixed bugs:** + +- Standalone Publisher: getting fps from context instead of nonexistent entity [\#729](https://github.com/pypeclub/pype/pull/729) + # Changelog ## [2.13.6](https://github.com/pypeclub/pype/tree/2.13.6) (2020-11-15) @@ -789,4 +1054,7 @@ A large cleanup release. Most of the change are under the hood. 
- _(avalon)_ subsets in maya 2019 weren't behaving correctly in the outliner + + \* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)* diff --git a/README.md b/README.md index aae79f2358..566e226538 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,10 @@ OpenPype ==== +[![documentation](https://github.com/pypeclub/pype/actions/workflows/documentation.yml/badge.svg)](https://github.com/pypeclub/pype/actions/workflows/documentation.yml) ![GitHub Requirements](https://img.shields.io/requires/github/pypeclub/pype?labelColor=303846) ![GitHub VFX Platform](https://img.shields.io/badge/vfx%20platform-2021-lightgrey?labelColor=303846) + + + Introduction ------------ @@ -61,7 +65,8 @@ git clone --recurse-submodules git@github.com:Pypeclub/OpenPype.git #### To build OpenPype: 1) Run `.\tools\create_env.ps1` to create virtual environment in `.\venv` -2) Run `.\tools\build.ps1` to build OpenPype executables in `.\build\` +2) Run `.\tools\fetch_thirdparty_libs.ps1` to download third-party dependencies like ffmpeg and oiio. These will be included in the build. +3) Run `.\tools\build.ps1` to build OpenPype executables in `.\build\` To create distributable OpenPype versions, run `./tools/create_zip.ps1` - that will create zip file with name `openpype-vx.x.x.zip` parsed from current OpenPype repository and @@ -116,8 +121,8 @@ pyenv local 3.7.9 #### To build OpenPype: 1) Run `.\tools\create_env.sh` to create virtual environment in `.\venv` -2) Run `.\tools\build.sh` to build OpenPype executables in `.\build\` - +2) Run `./tools/fetch_thirdparty_libs.sh` to download third-party dependencies like ffmpeg and oiio. These will be included in the build. +3) Run `./tools/build.sh` to build OpenPype executables in `./build/` ### Linux @@ -166,14 +171,23 @@ sudo yum install qt5-qtbase-devel
Use pyenv to install Python version for OpenPype build -You will need **bzip2**, **readline** and **sqlite3** libraries. +You will need **bzip2**, **readline**, **sqlite3** and other libraries. -**Ubuntu:** +For more details about Python build environments see: + +https://github.com/pyenv/pyenv/wiki#suggested-build-environment + +**For Ubuntu:** ```sh -sudo apt install libbz2-dev libreadline-dev libsqlite3-dev +sudo apt-get update; sudo apt-get install --no-install-recommends make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev ``` -1) install **pyenv** +**For Centos:** +```sh +yum install gcc zlib-devel bzip2 bzip2-devel readline-devel sqlite sqlite-devel openssl-devel tk-devel libffi-devel +``` + +**install pyenv** ```sh curl https://pyenv.run | bash diff --git a/igniter/Poppins/OFL.txt b/igniter/Poppins/OFL.txt new file mode 100644 index 0000000000..76df3b5656 --- /dev/null +++ b/igniter/Poppins/OFL.txt @@ -0,0 +1,93 @@ +Copyright 2020 The Poppins Project Authors (https://github.com/itfoundry/Poppins) + +This Font Software is licensed under the SIL Open Font License, Version 1.1. +This license is copied below, and is also available with a FAQ at: +http://scripts.sil.org/OFL + + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. 
+ +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. 
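The Poppins family added below ships with Igniter and is registered at runtime through `QtGui.QFontDatabase` (see the `igniter/install_dialog.py` hunk later in this diff). A minimal standalone sketch of that registration pattern — the `Poppins` directory layout mirrors this repository, but the script itself is illustrative only, not part of the diff:

```python
# Illustrative sketch (not part of this diff): register bundled .ttf files
# with Qt the way igniter/install_dialog.py does for the Poppins family.
import os
import sys

from Qt import QtGui, QtWidgets  # Qt.py shim, as used throughout igniter

app = QtWidgets.QApplication(sys.argv)

# Fonts live next to this module, e.g. igniter/Poppins/*.ttf
fonts_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "Poppins")
for filename in os.listdir(fonts_dir):
    if os.path.splitext(filename)[1] == ".ttf":
        # addApplicationFont() needs a resolvable path; a bare file name
        # would be looked up relative to the current working directory.
        QtGui.QFontDatabase.addApplicationFont(os.path.join(fonts_dir, filename))
```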
diff --git a/igniter/Poppins/Poppins-Black.ttf b/igniter/Poppins/Poppins-Black.ttf new file mode 100644 index 0000000000..a9520b78ac Binary files /dev/null and b/igniter/Poppins/Poppins-Black.ttf differ diff --git a/igniter/Poppins/Poppins-BlackItalic.ttf b/igniter/Poppins/Poppins-BlackItalic.ttf new file mode 100644 index 0000000000..ebfdd707e5 Binary files /dev/null and b/igniter/Poppins/Poppins-BlackItalic.ttf differ diff --git a/igniter/Poppins/Poppins-Bold.ttf b/igniter/Poppins/Poppins-Bold.ttf new file mode 100644 index 0000000000..b94d47f3af Binary files /dev/null and b/igniter/Poppins/Poppins-Bold.ttf differ diff --git a/igniter/Poppins/Poppins-BoldItalic.ttf b/igniter/Poppins/Poppins-BoldItalic.ttf new file mode 100644 index 0000000000..e2e64456c7 Binary files /dev/null and b/igniter/Poppins/Poppins-BoldItalic.ttf differ diff --git a/igniter/Poppins/Poppins-ExtraBold.ttf b/igniter/Poppins/Poppins-ExtraBold.ttf new file mode 100644 index 0000000000..8f008c3684 Binary files /dev/null and b/igniter/Poppins/Poppins-ExtraBold.ttf differ diff --git a/igniter/Poppins/Poppins-ExtraBoldItalic.ttf b/igniter/Poppins/Poppins-ExtraBoldItalic.ttf new file mode 100644 index 0000000000..b2a9bf557a Binary files /dev/null and b/igniter/Poppins/Poppins-ExtraBoldItalic.ttf differ diff --git a/igniter/Poppins/Poppins-ExtraLight.ttf b/igniter/Poppins/Poppins-ExtraLight.ttf new file mode 100644 index 0000000000..ee6238251f Binary files /dev/null and b/igniter/Poppins/Poppins-ExtraLight.ttf differ diff --git a/igniter/Poppins/Poppins-ExtraLightItalic.ttf b/igniter/Poppins/Poppins-ExtraLightItalic.ttf new file mode 100644 index 0000000000..e392492abd Binary files /dev/null and b/igniter/Poppins/Poppins-ExtraLightItalic.ttf differ diff --git a/igniter/Poppins/Poppins-Italic.ttf b/igniter/Poppins/Poppins-Italic.ttf new file mode 100644 index 0000000000..46203996d3 Binary files /dev/null and b/igniter/Poppins/Poppins-Italic.ttf differ diff --git a/igniter/Poppins/Poppins-Light.ttf b/igniter/Poppins/Poppins-Light.ttf new file mode 100644 index 0000000000..2ab022196b Binary files /dev/null and b/igniter/Poppins/Poppins-Light.ttf differ diff --git a/igniter/Poppins/Poppins-LightItalic.ttf b/igniter/Poppins/Poppins-LightItalic.ttf new file mode 100644 index 0000000000..6f9279daef Binary files /dev/null and b/igniter/Poppins/Poppins-LightItalic.ttf differ diff --git a/igniter/Poppins/Poppins-Medium.ttf b/igniter/Poppins/Poppins-Medium.ttf new file mode 100644 index 0000000000..e90e87ed69 Binary files /dev/null and b/igniter/Poppins/Poppins-Medium.ttf differ diff --git a/igniter/Poppins/Poppins-MediumItalic.ttf b/igniter/Poppins/Poppins-MediumItalic.ttf new file mode 100644 index 0000000000..d8a251c7c4 Binary files /dev/null and b/igniter/Poppins/Poppins-MediumItalic.ttf differ diff --git a/igniter/Poppins/Poppins-Regular.ttf b/igniter/Poppins/Poppins-Regular.ttf new file mode 100644 index 0000000000..be06e7fdca Binary files /dev/null and b/igniter/Poppins/Poppins-Regular.ttf differ diff --git a/igniter/Poppins/Poppins-SemiBold.ttf b/igniter/Poppins/Poppins-SemiBold.ttf new file mode 100644 index 0000000000..dabf7c242e Binary files /dev/null and b/igniter/Poppins/Poppins-SemiBold.ttf differ diff --git a/igniter/Poppins/Poppins-SemiBoldItalic.ttf b/igniter/Poppins/Poppins-SemiBoldItalic.ttf new file mode 100644 index 0000000000..29d5f7419b Binary files /dev/null and b/igniter/Poppins/Poppins-SemiBoldItalic.ttf differ diff --git a/igniter/Poppins/Poppins-Thin.ttf b/igniter/Poppins/Poppins-Thin.ttf new file mode 
100644 index 0000000000..f5c0fdd531 Binary files /dev/null and b/igniter/Poppins/Poppins-Thin.ttf differ diff --git a/igniter/Poppins/Poppins-ThinItalic.ttf b/igniter/Poppins/Poppins-ThinItalic.ttf new file mode 100644 index 0000000000..b910089316 Binary files /dev/null and b/igniter/Poppins/Poppins-ThinItalic.ttf differ diff --git a/igniter/__init__.py b/igniter/__init__.py index 6f9757cfc8..20bf9be106 100644 --- a/igniter/__init__.py +++ b/igniter/__init__.py @@ -1,40 +1,34 @@ # -*- coding: utf-8 -*- """Open install dialog.""" +import os import sys -import os os.chdir(os.path.dirname(__file__)) # for override sys.path in Deadline -from Qt import QtWidgets # noqa -from Qt.QtCore import Signal # noqa - -from .install_dialog import InstallDialog from .bootstrap_repos import BootstrapRepos from .version import __version__ as version -RESULT = 0 - - -def get_result(res: int): - """Sets result returned from dialog.""" - global RESULT - RESULT = res - - def open_dialog(): """Show Igniter dialog.""" + from Qt import QtWidgets, QtCore + from .install_dialog import InstallDialog + + scale_attr = getattr(QtCore.Qt, "AA_EnableHighDpiScaling", None) + if scale_attr is not None: + QtWidgets.QApplication.setAttribute(scale_attr) + app = QtWidgets.QApplication(sys.argv) + d = InstallDialog() - d.finished.connect(get_result) d.open() - app.exec() - return RESULT + + app.exec_() + return d.result() __all__ = [ - "InstallDialog", "BootstrapRepos", "open_dialog", "version" diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py index 2f305e24e3..754a2d2e25 100644 --- a/igniter/bootstrap_repos.py +++ b/igniter/bootstrap_repos.py @@ -14,7 +14,10 @@ from zipfile import ZipFile, BadZipFile from appdirs import user_data_dir from speedcopy import copyfile -from .user_settings import OpenPypeSettingsRegistry +from .user_settings import ( + OpenPypeSecureRegistry, + OpenPypeSettingsRegistry +) from .tools import get_openpype_path_from_db @@ -239,6 +242,7 @@ class BootstrapRepos: self._app = "openpype" self._log = log.getLogger(str(__class__)) self.data_dir = Path(user_data_dir(self._app, self._vendor)) + self.secure_registry = OpenPypeSecureRegistry("mongodb") self.registry = OpenPypeSettingsRegistry() self.zip_filter = [".pyc", "__pycache__"] self.openpype_filter = [ @@ -281,7 +285,7 @@ class BootstrapRepos: """Get version of local OpenPype.""" version = {} - path = Path(os.path.dirname(__file__)).parent / "openpype" / "version.py" + path = Path(os.environ["OPENPYPE_ROOT"]) / "openpype" / "version.py" with open(path, "r") as fp: exec(fp.read(), version) return version["__version__"] diff --git a/igniter/install_dialog.py b/igniter/install_dialog.py index 2cc0ed8448..e6439b5129 100644 --- a/igniter/install_dialog.py +++ b/igniter/install_dialog.py @@ -2,519 +2,496 @@ """Show dialog for choosing central pype repository.""" import os import sys +import re +import collections from Qt import QtCore, QtGui, QtWidgets # noqa from Qt.QtGui import QValidator # noqa from Qt.QtCore import QTimer # noqa -from .install_thread import InstallThread, InstallResult +from .install_thread import InstallThread from .tools import ( - validate_path_string, validate_mongo_connection, get_openpype_path_from_db ) -from .user_settings import OpenPypeSettingsRegistry +from .user_settings import OpenPypeSecureRegistry from .version import __version__ -class FocusHandlingLineEdit(QtWidgets.QLineEdit): - """Handling focus in/out on QLineEdit.""" - focusIn = QtCore.Signal() - focusOut = QtCore.Signal() +def load_stylesheet(): + 
stylesheet_path = os.path.join( + os.path.dirname(__file__), + "stylesheet.css" + ) + with open(stylesheet_path, "r") as file_stream: + stylesheet = file_stream.read() - def focusOutEvent(self, event): # noqa - """For emitting signal on focus out.""" - self.focusOut.emit() - super().focusOutEvent(event) + return stylesheet - def focusInEvent(self, event): # noqa - """For emitting signal on focus in.""" - self.focusIn.emit() - super().focusInEvent(event) + +class ButtonWithOptions(QtWidgets.QFrame): + option_clicked = QtCore.Signal(str) + + def __init__(self, commands, parent=None): + super(ButtonWithOptions, self).__init__(parent) + + self.setObjectName("ButtonWithOptions") + + options_btn = QtWidgets.QToolButton(self) + options_btn.setArrowType(QtCore.Qt.DownArrow) + options_btn.setIconSize(QtCore.QSize(12, 12)) + + default = None + default_label = None + options_menu = QtWidgets.QMenu(self) + for option, option_label in commands.items(): + if default is None: + default = option + default_label = option_label + continue + action = QtWidgets.QAction(option_label, options_menu) + action.setData(option) + options_menu.addAction(action) + + main_btn = QtWidgets.QPushButton(default_label, self) + main_btn.setFlat(True) + + main_layout = QtWidgets.QHBoxLayout(self) + main_layout.setContentsMargins(0, 0, 0, 0) + main_layout.setSpacing(1) + + main_layout.addWidget(main_btn, 1, QtCore.Qt.AlignVCenter) + main_layout.addWidget(options_btn, 0, QtCore.Qt.AlignVCenter) + + main_btn.clicked.connect(self._on_main_button) + options_btn.clicked.connect(self._on_options_click) + options_menu.triggered.connect(self._on_trigger) + + self.main_btn = main_btn + self.options_btn = options_btn + self.options_menu = options_menu + + options_btn.setEnabled(not options_menu.isEmpty()) + + self._default_value = default + + def resizeEvent(self, event): + super(ButtonWithOptions, self).resizeEvent(event) + self.options_btn.setFixedHeight(self.main_btn.height()) + + def _on_options_click(self): + pos = self.main_btn.rect().bottomLeft() + point = self.main_btn.mapToGlobal(pos) + self.options_menu.popup(point) + + def _on_trigger(self, action): + self.option_clicked.emit(action.data()) + + def _on_main_button(self): + self.option_clicked.emit(self._default_value) + + +class NiceProgressBar(QtWidgets.QProgressBar): + def __init__(self, parent=None): + super(NiceProgressBar, self).__init__(parent) + self._real_value = 0 + + def setValue(self, value): + self._real_value = value + if value != 0 and value < 11: + value = 11 + + super(NiceProgressBar, self).setValue(value) + + def value(self): + return self._real_value + + def text(self): + return "{} %".format(self._real_value) + + +class ConsoleWidget(QtWidgets.QWidget): + def __init__(self, parent=None): + super(ConsoleWidget, self).__init__(parent) + + # style for normal and error console text + default_console_style = QtGui.QTextCharFormat() + error_console_style = QtGui.QTextCharFormat() + default_console_style.setForeground( + QtGui.QColor.fromRgb(72, 200, 150) + ) + error_console_style.setForeground( + QtGui.QColor.fromRgb(184, 54, 19) + ) + + label = QtWidgets.QLabel("Console:", self) + + console_output = QtWidgets.QPlainTextEdit(self) + console_output.setMinimumSize(QtCore.QSize(300, 200)) + console_output.setReadOnly(True) + console_output.setCurrentCharFormat(default_console_style) + console_output.setObjectName("Console") + + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.setContentsMargins(0, 0, 0, 0) + main_layout.addWidget(label, 0) + 
main_layout.addWidget(console_output, 1) + + self.default_console_style = default_console_style + self.error_console_style = error_console_style + + self.label = label + self.console_output = console_output + + self.hide_console() + + def hide_console(self): + self.label.setVisible(False) + self.console_output.setVisible(False) + + self.updateGeometry() + + def show_console(self): + self.label.setVisible(True) + self.console_output.setVisible(True) + + self.updateGeometry() + + def update_console(self, msg: str, error: bool = False) -> None: + if not error: + self.console_output.setCurrentCharFormat( + self.default_console_style + ) + else: + self.console_output.setCurrentCharFormat( + self.error_console_style + ) + self.console_output.appendPlainText(msg) + + +class MongoUrlInput(QtWidgets.QLineEdit): + """Widget to input mongodb URL.""" + + def set_valid(self): + """Set valid state on mongo url input.""" + self.setProperty("state", "valid") + self.style().polish(self) + + def remove_state(self): + """Clear state on mongo url input.""" + self.setProperty("state", "") + self.style().polish(self) + + def set_invalid(self): + """Set invalid state on mongo url input.""" + self.setProperty("state", "invalid") + self.style().polish(self) class InstallDialog(QtWidgets.QDialog): """Main Igniter dialog window.""" - _size_w = 400 - _size_h = 600 - path = "" - _controls_disabled = False + + mongo_url_regex = re.compile(r"^(mongodb|mongodb\+srv)://.*?") + + _width = 500 + _height = 200 + commands = collections.OrderedDict([ + ("run", "Start"), + ("run_from_code", "Run from code") + ]) def __init__(self, parent=None): super(InstallDialog, self).__init__(parent) - self.registry = OpenPypeSettingsRegistry() - self.mongo_url = "" + self.setWindowTitle( + f"OpenPype Igniter {__version__}" + ) + self.setWindowFlags( + QtCore.Qt.WindowCloseButtonHint + | QtCore.Qt.WindowMinimizeButtonHint + ) + + current_dir = os.path.dirname(os.path.abspath(__file__)) + roboto_font_path = os.path.join(current_dir, "RobotoMono-Regular.ttf") + poppins_font_path = os.path.join(current_dir, "Poppins") + icon_path = os.path.join(current_dir, "openpype_icon.png") + + # Install Roboto and Poppins fonts + QtGui.QFontDatabase.addApplicationFont(roboto_font_path) + for filename in os.listdir(poppins_font_path): + if os.path.splitext(filename)[1] == ".ttf": + QtGui.QFontDatabase.addApplicationFont(os.path.join(poppins_font_path, filename)) + + # Load logo + pixmap_openpype_logo = QtGui.QPixmap(icon_path) + # Set logo as icon of window + self.setWindowIcon(QtGui.QIcon(pixmap_openpype_logo)) + + secure_registry = OpenPypeSecureRegistry("mongodb") + mongo_url = "" try: - self.mongo_url = ( + mongo_url = ( os.getenv("OPENPYPE_MONGO", "") - or self.registry.get_secure_item("openPypeMongo") + or secure_registry.get_item("openPypeMongo") ) except ValueError: pass - self.setWindowTitle( - f"OpenPype Igniter {__version__} - OpenPype installation") - self._icon_path = os.path.join( - os.path.dirname(__file__), 'openpype_icon.png') - icon = QtGui.QIcon(self._icon_path) - self.setWindowIcon(icon) - self.setWindowFlags( - QtCore.Qt.WindowCloseButtonHint | - QtCore.Qt.WindowMinimizeButtonHint - ) + self.mongo_url = mongo_url + self._pixmap_openpype_logo = pixmap_openpype_logo - self.setMinimumSize( - QtCore.QSize(self._size_w, self._size_h)) - self.setMaximumSize( - QtCore.QSize(self._size_w + 100, self._size_h + 500)) - - # style for normal console text - self.default_console_style = QtGui.QTextCharFormat() - # self.default_console_style.setFontPointSize(0.1) -
self.default_console_style.setForeground( - QtGui.QColor.fromRgb(72, 200, 150)) - - # style for error console text - self.error_console_style = QtGui.QTextCharFormat() - # self.error_console_style.setFontPointSize(0.1) - self.error_console_style.setForeground( - QtGui.QColor.fromRgb(184, 54, 19)) - - QtGui.QFontDatabase.addApplicationFont( - os.path.join( - os.path.dirname(__file__), 'RobotoMono-Regular.ttf') - ) - self._openpype_run_ready = False + self._secure_registry = secure_registry + self._controls_disabled = False + self._install_thread = None + self.resize(QtCore.QSize(self._width, self._height)) self._init_ui() + # Set stylesheet + self.setStyleSheet(load_stylesheet()) + + # Trigger Mongo URL validation + self._mongo_input.setText(self.mongo_url) + def _init_ui(self): # basic visual style - dark background, light text - self.setStyleSheet(""" - color: rgb(200, 200, 200); - background-color: rgb(23, 23, 23); - """) - - main = QtWidgets.QVBoxLayout(self) # Main info # -------------------------------------------------------------------- - self.main_label = QtWidgets.QLabel( - """Welcome to OpenPype -
- We've detected OpenPype is not configured yet. But don't worry, - this is as easy as setting one or two things. -
- """) - self.main_label.setWordWrap(True) - self.main_label.setStyleSheet("color: rgb(200, 200, 200);") - - # OpenPype path info - # -------------------------------------------------------------------- - - self.openpype_path_label = QtWidgets.QLabel( - """This is Path to studio location where OpenPype versions - are stored. It will be pre-filled if your MongoDB connection is - already set and your studio defined this location. -

- Leave it empty if you want to install OpenPype version that - comes with this installation. -
-
- If you want to just try OpenPype without installing, hit the - middle button that states "run without installation". -
- """ - ) - - self.openpype_path_label.setWordWrap(True) - self.openpype_path_label.setStyleSheet("color: rgb(150, 150, 150);") - - # Path/Url box | Select button - # -------------------------------------------------------------------- - - input_layout = QtWidgets.QHBoxLayout() - - input_layout.setContentsMargins(0, 10, 0, 10) - self.user_input = FocusHandlingLineEdit() - - self.user_input.setPlaceholderText("Path to OpenPype versions") - self.user_input.textChanged.connect(self._path_changed) - self.user_input.setStyleSheet( - ("color: rgb(233, 233, 233);" - "background-color: rgb(64, 64, 64);" - "padding: 0.5em;" - "border: 1px solid rgb(32, 32, 32);") - ) - - self.user_input.setValidator(PathValidator(self.user_input)) - - self._btn_select = QtWidgets.QPushButton("Select") - self._btn_select.setToolTip( - "Select OpenPype repository" - ) - self._btn_select.setStyleSheet( - ("color: rgb(64, 64, 64);" - "background-color: rgb(72, 200, 150);" - "padding: 0.5em;") - ) - self._btn_select.setMaximumSize(100, 140) - self._btn_select.clicked.connect(self._on_select_clicked) - - input_layout.addWidget(self.user_input) - input_layout.addWidget(self._btn_select) + main_label = QtWidgets.QLabel("Welcome to OpenPype", self) + main_label.setWordWrap(True) + main_label.setObjectName("MainLabel") # Mongo box | OK button # -------------------------------------------------------------------- - - self.mongo_label = QtWidgets.QLabel( - """Enter URL for running MongoDB instance:""" + mongo_input = MongoUrlInput(self) + mongo_input.setPlaceholderText( + "Enter your database Address. Example: mongodb://192.168.1.10:2707" ) - self.mongo_label.setWordWrap(True) - self.mongo_label.setStyleSheet("color: rgb(150, 150, 150);") + mongo_messages_widget = QtWidgets.QWidget(self) - class MongoWidget(QtWidgets.QWidget): - """Widget to input mongodb URL.""" - - def __init__(self, parent=None): - self._btn_mongo = None - super(MongoWidget, self).__init__(parent) - mongo_layout = QtWidgets.QHBoxLayout() - mongo_layout.setContentsMargins(0, 0, 0, 0) - self._mongo_input = FocusHandlingLineEdit() - self._mongo_input.setPlaceholderText("Mongo URL") - self._mongo_input.textChanged.connect(self._mongo_changed) - self._mongo_input.focusIn.connect(self._focus_in) - self._mongo_input.focusOut.connect(self._focus_out) - self._mongo_input.setValidator( - MongoValidator(self._mongo_input)) - self._mongo_input.setStyleSheet( - ("color: rgb(233, 233, 233);" - "background-color: rgb(64, 64, 64);" - "padding: 0.5em;" - "border: 1px solid rgb(32, 32, 32);") - ) - - mongo_layout.addWidget(self._mongo_input) - self.setLayout(mongo_layout) - - def _focus_out(self): - self.validate_url() - - def _focus_in(self): - self._mongo_input.setStyleSheet( - """ - background-color: rgb(32, 32, 19); - color: rgb(255, 190, 15); - padding: 0.5em; - border: 1px solid rgb(64, 64, 32); - """ - ) - - def _mongo_changed(self, mongo: str): - self.parent().mongo_url = mongo - - def get_mongo_url(self) -> str: - """Helper to get url from parent.""" - return self.parent().mongo_url - - def set_mongo_url(self, mongo: str): - """Helper to set url to parent. - - Args: - mongo (str): mongodb url string. 
- - """ - self._mongo_input.setText(mongo) - - def set_valid(self): - """Set valid state on mongo url input.""" - self._mongo_input.setStyleSheet( - """ - background-color: rgb(19, 19, 19); - color: rgb(64, 230, 132); - padding: 0.5em; - border: 1px solid rgb(32, 64, 32); - """ - ) - self.parent().install_button.setEnabled(True) - - def set_invalid(self): - """Set invalid state on mongo url input.""" - self._mongo_input.setStyleSheet( - """ - background-color: rgb(32, 19, 19); - color: rgb(255, 69, 0); - padding: 0.5em; - border: 1px solid rgb(64, 32, 32); - """ - ) - self.parent().install_button.setEnabled(False) - - def set_read_only(self, state: bool): - """Set input read-only.""" - self._mongo_input.setReadOnly(state) - - def validate_url(self) -> bool: - """Validate if entered url is ok. - - Returns: - True if url is valid monogo string. - - """ - if self.parent().mongo_url == "": - return False - - is_valid, reason_str = validate_mongo_connection( - self.parent().mongo_url - ) - if not is_valid: - self.set_invalid() - self.parent().update_console(f"!!! {reason_str}", True) - return False - else: - self.set_valid() - return True - - self._mongo = MongoWidget(self) - if self.mongo_url: - self._mongo.set_mongo_url(self.mongo_url) - - # Bottom button bar - # -------------------------------------------------------------------- - bottom_widget = QtWidgets.QWidget() - bottom_layout = QtWidgets.QHBoxLayout() - openpype_logo_label = QtWidgets.QLabel("openpype logo") - openpype_logo = QtGui.QPixmap(self._icon_path) - # openpype_logo.scaled( - # openpype_logo_label.width(), - # openpype_logo_label.height(), QtCore.Qt.KeepAspectRatio) - openpype_logo_label.setPixmap(openpype_logo) - openpype_logo_label.setContentsMargins(10, 0, 0, 10) - - # install button - - - - - - - - - - - - - - - - - - - - - - - - - - - - self.install_button = QtWidgets.QPushButton("Install") - self.install_button.setStyleSheet( - ("color: rgb(64, 64, 64);" - "background-color: rgb(72, 200, 150);" - "padding: 0.5em;") + mongo_connection_msg = QtWidgets.QLabel(mongo_messages_widget) + mongo_connection_msg.setVisible(True) + mongo_connection_msg.setTextInteractionFlags( + QtCore.Qt.TextSelectableByMouse ) - self.install_button.setMinimumSize(64, 24) - self.install_button.setToolTip("Install OpenPype") - self.install_button.clicked.connect(self._on_ok_clicked) - # run from current button - - - - - - - - - - - - - - - - - - - - - - - self.run_button = QtWidgets.QPushButton("Run without installation") - self.run_button.setStyleSheet( - ("color: rgb(64, 64, 64);" - "background-color: rgb(200, 164, 64);" - "padding: 0.5em;") - ) - self.run_button.setMinimumSize(64, 24) - self.run_button.setToolTip("Run without installing Pype") - self.run_button.clicked.connect(self._on_run_clicked) - - # install button - - - - - - - - - - - - - - - - - - - - - - - - - - - - self._exit_button = QtWidgets.QPushButton("Exit") - self._exit_button.setStyleSheet( - ("color: rgb(64, 64, 64);" - "background-color: rgb(128, 128, 128);" - "padding: 0.5em;") - ) - self._exit_button.setMinimumSize(64, 24) - self._exit_button.setToolTip("Exit") - self._exit_button.clicked.connect(self._on_exit_clicked) - - bottom_layout.setContentsMargins(0, 10, 10, 0) - bottom_layout.setAlignment(QtCore.Qt.AlignVCenter) - bottom_layout.addWidget(openpype_logo_label, 0, QtCore.Qt.AlignVCenter) - bottom_layout.addStretch(1) - bottom_layout.addWidget(self.install_button, 0, QtCore.Qt.AlignVCenter) - bottom_layout.addWidget(self.run_button, 0, QtCore.Qt.AlignVCenter) - 
bottom_layout.addWidget(self._exit_button, 0, QtCore.Qt.AlignVCenter)
-
-        bottom_widget.setLayout(bottom_layout)
-        bottom_widget.setStyleSheet("background-color: rgb(32, 32, 32);")
-
-        # Console label
-        # --------------------------------------------------------------------
-        self._status_label = QtWidgets.QLabel("Console:")
-        self._status_label.setContentsMargins(0, 10, 0, 10)
-        self._status_label.setStyleSheet("color: rgb(61, 115, 97);")
-
-        # Console
-        # --------------------------------------------------------------------
-        self._status_box = QtWidgets.QPlainTextEdit()
-        self._status_box.setReadOnly(True)
-        self._status_box.setCurrentCharFormat(self.default_console_style)
-        self._status_box.setStyleSheet(
-            """QPlainTextEdit {
-                background-color: rgb(32, 32, 32);
-                color: rgb(72, 200, 150);
-                font-family: "Roboto Mono";
-                font-size: 0.5em;
-                border: 1px solid rgb(48, 48, 48);
-            }
-            QScrollBar:vertical {
-                border: 1px solid rgb(61, 115, 97);
-                background: #000;
-                width:5px;
-                margin: 0px 0px 0px 0px;
-            }
-            QScrollBar::handle:vertical {
-                background: rgb(72, 200, 150);
-                min-height: 0px;
-            }
-            QScrollBar::sub-page:vertical {
-                background: rgb(31, 62, 50);
-            }
-            QScrollBar::add-page:vertical {
-                background: rgb(31, 62, 50);
-            }
-            QScrollBar::add-line:vertical {
-                background: rgb(72, 200, 150);
-                height: 0px;
-                subcontrol-position: bottom;
-                subcontrol-origin: margin;
-            }
-            QScrollBar::sub-line:vertical {
-                background: rgb(72, 200, 150);
-                height: 0 px;
-                subcontrol-position: top;
-                subcontrol-origin: margin;
-            }
-            """
-        )
+        mongo_messages_layout = QtWidgets.QVBoxLayout(mongo_messages_widget)
+        mongo_messages_layout.setContentsMargins(0, 0, 0, 0)
+        mongo_messages_layout.addWidget(mongo_connection_msg)

         # Progress bar
         # --------------------------------------------------------------------
-        self._progress_bar = QtWidgets.QProgressBar()
-        self._progress_bar.setValue(0)
-        self._progress_bar.setAlignment(QtCore.Qt.AlignCenter)
-        self._progress_bar.setTextVisible(False)
-        # setting font and the size
-        self._progress_bar.setFont(QtGui.QFont('Arial', 7))
-        self._progress_bar.setStyleSheet(
-            """QProgressBar:horizontal {
-                height: 5px;
-                border: 1px solid rgb(31, 62, 50);
-                color: rgb(72, 200, 150);
-            }
-            QProgressBar::chunk:horizontal {
-                background-color: rgb(72, 200, 150);
-            }
-            """
+        progress_bar = NiceProgressBar(self)
+        progress_bar.setAlignment(QtCore.Qt.AlignCenter)
+        progress_bar.setTextVisible(False)
+
+        # Console
+        # --------------------------------------------------------------------
+        console_widget = ConsoleWidget(self)
+
+        # Bottom button bar
+        # --------------------------------------------------------------------
+        bottom_widget = QtWidgets.QWidget(self)
+
+        btns_widget = QtWidgets.QWidget(bottom_widget)
+
+        openpype_logo_label = QtWidgets.QLabel("openpype logo", bottom_widget)
+        openpype_logo_label.setPixmap(self._pixmap_openpype_logo)
+
+        run_button = ButtonWithOptions(
+            self.commands,
+            btns_widget
         )
+        run_button.setMinimumSize(64, 24)
+        run_button.setToolTip("Run OpenPype")
+
+        # exit button - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+        exit_button = QtWidgets.QPushButton("Exit", btns_widget)
+        exit_button.setObjectName("ExitBtn")
+        exit_button.setFlat(True)
+        exit_button.setMinimumSize(64, 24)
+        exit_button.setToolTip("Exit")
+
+        btns_layout = QtWidgets.QHBoxLayout(btns_widget)
+        btns_layout.setContentsMargins(0, 0, 0, 0)
+        btns_layout.addWidget(run_button, 0)
+        btns_layout.addWidget(exit_button, 0)
+
+        bottom_layout = QtWidgets.QHBoxLayout(bottom_widget)
+
bottom_layout.setContentsMargins(0, 0, 0, 0) + bottom_layout.setAlignment(QtCore.Qt.AlignHCenter) + bottom_layout.addWidget(openpype_logo_label, 0) + bottom_layout.addStretch(1) + bottom_layout.addWidget(btns_widget, 0) + # add all to main - main.addWidget(self.main_label, 0) - main.addWidget(self.openpype_path_label, 0) - main.addLayout(input_layout, 0) - main.addWidget(self.mongo_label, 0) - main.addWidget(self._mongo, 0) + main = QtWidgets.QVBoxLayout(self) + main.addSpacing(15) + main.addWidget(main_label, 0) + main.addSpacing(15) + main.addWidget(mongo_input, 0) + main.addWidget(mongo_messages_widget, 0) - main.addWidget(self._status_label, 0) - main.addWidget(self._status_box, 1) + main.addWidget(progress_bar, 0) + main.addSpacing(15) + + main.addWidget(console_widget, 1) - main.addWidget(self._progress_bar, 0) main.addWidget(bottom_widget, 0) - self.setLayout(main) + run_button.option_clicked.connect(self._on_run_btn_click) + exit_button.clicked.connect(self._on_exit_clicked) + mongo_input.textChanged.connect(self._on_mongo_url_change) - # if mongo url is ok, try to get openpype path from there - if self._mongo.validate_url() and len(self.path) == 0: - self.path = get_openpype_path_from_db(self.mongo_url) - self.user_input.setText(self.path) + self._console_widget = console_widget - def _on_select_clicked(self): - """Show directory dialog.""" - options = QtWidgets.QFileDialog.Options() - options |= QtWidgets.QFileDialog.DontUseNativeDialog - options |= QtWidgets.QFileDialog.ShowDirsOnly + self.main_label = main_label - result = QtWidgets.QFileDialog.getExistingDirectory( - parent=self, - caption='Select path', - directory=os.getcwd(), - options=options) + self._mongo_input = mongo_input - if not result: + self._mongo_connection_msg = mongo_connection_msg + + self._run_button = run_button + self._exit_button = exit_button + self._progress_bar = progress_bar + + def _on_run_btn_click(self, option): + # Disable buttons + self._disable_buttons() + # Set progress to any value + self._update_progress(1) + self._progress_bar.repaint() + # Add label to show that is connecting to mongo + self.set_invalid_mongo_connection(self.mongo_url, True) + + # Process events to repaint changes + QtWidgets.QApplication.processEvents() + + if not self.validate_url(): + self._enable_buttons() + self._update_progress(0) + # Update any messages + self._mongo_input.setText(self.mongo_url) return - filename = QtCore.QDir.toNativeSeparators(result) - - if os.path.isdir(filename): - self.path = filename - self.user_input.setText(filename) - - def _on_run_clicked(self): - valid, reason = validate_mongo_connection( - self._mongo.get_mongo_url() - ) - if not valid: - self._mongo.set_invalid() - self.update_console(f"!!! {reason}", True) - return + if option == "run": + self._run_openpype() + elif option == "run_from_code": + self._run_openpype_from_code() else: - self._mongo.set_valid() + raise AssertionError("BUG: Unknown variant \"{}\"".format(option)) + + self._enable_buttons() + + def _run_openpype_from_code(self): + self._secure_registry.set_item("openPypeMongo", self.mongo_url) self.done(2) - def _on_ok_clicked(self): + def _run_openpype(self): """Start install process. This will once again validate entered path and mongo if ok, start working thread that will do actual job. """ - valid, reason = validate_mongo_connection( - self._mongo.get_mongo_url() - ) - if not valid: - self._mongo.set_invalid() - self.update_console(f"!!! 
{reason}", True) - return - else: - self._mongo.set_valid() - - if self._openpype_run_ready: - self.done(3) + # Check if install thread is not already running + if self._install_thread and self._install_thread.isRunning(): return - if self.path and len(self.path) > 0: - valid, reason = validate_path_string(self.path) + self._mongo_input.set_valid() - if not valid: - self.update_console(f"!!! {reason}", True) - return + install_thread = InstallThread(self) + install_thread.message.connect(self.update_console) + install_thread.progress.connect(self._update_progress) + install_thread.finished.connect(self._installation_finished) + install_thread.set_mongo(self.mongo_url) - self._disable_buttons() - self._install_thread = InstallThread( - self.install_result_callback_handler, self) - self._install_thread.message.connect(self.update_console) - self._install_thread.progress.connect(self._update_progress) - self._install_thread.finished.connect(self._enable_buttons) - self._install_thread.set_path(self.path) - self._install_thread.set_mongo(self._mongo.get_mongo_url()) - self._install_thread.start() + self._install_thread = install_thread - def install_result_callback_handler(self, result: InstallResult): - """Change button behaviour based on installation outcome.""" - status = result.status + install_thread.start() + + def _installation_finished(self): + status = self._install_thread.result() if status >= 0: - self.install_button.setText("Run installed OpenPype") - self._openpype_run_ready = True + self._update_progress(100) + QtWidgets.QApplication.processEvents() + self.done(3) + else: + self._show_console() def _update_progress(self, progress: int): self._progress_bar.setValue(progress) + text_visible = self._progress_bar.isTextVisible() + if progress == 0: + if text_visible: + self._progress_bar.setTextVisible(False) + elif not text_visible: + self._progress_bar.setTextVisible(True) def _on_exit_clicked(self): self.reject() - def _path_changed(self, path: str) -> str: - """Set path.""" - self.path = path - return path + def _on_mongo_url_change(self, new_value): + # Strip the value + new_value = new_value.strip() + # Store new mongo url to variable + self.mongo_url = new_value + + msg = None + # Change style of input + if not new_value: + self._mongo_input.remove_state() + elif not self.mongo_url_regex.match(new_value): + self._mongo_input.set_invalid() + msg = ( + "Mongo URL should start with" + " \"mongodb://\" or \"mongodb+srv://\"" + ) + else: + self._mongo_input.set_valid() + + self.set_invalid_mongo_url(msg) + + def validate_url(self): + """Validate if entered url is ok. + + Returns: + True if url is valid monogo string. + + """ + if self.mongo_url == "": + return False + + is_valid, reason_str = validate_mongo_connection(self.mongo_url) + if not is_valid: + self.set_invalid_mongo_connection(self.mongo_url) + self._mongo_input.set_invalid() + self.update_console(f"!!! 
{reason_str}", True) + return False + + self.set_invalid_mongo_connection(None) + self._mongo_input.set_valid() + return True + + def set_invalid_mongo_url(self, reason): + if reason is None: + self._mongo_connection_msg.setText("") + else: + self._mongo_connection_msg.setText("- {}".format(reason)) + + def set_invalid_mongo_connection(self, mongo_url, connecting=False): + if mongo_url is None: + self.set_invalid_mongo_url(mongo_url) + return + + if connecting: + msg = "Connecting to: {}".format(mongo_url) + else: + msg = "Can't connect to: {}".format(mongo_url) + + self.set_invalid_mongo_url(msg) def update_console(self, msg: str, error: bool = False) -> None: """Display message in console. @@ -523,26 +500,22 @@ class InstallDialog(QtWidgets.QDialog): msg (str): message. error (bool): if True, print it red. """ - if not error: - self._status_box.setCurrentCharFormat(self.default_console_style) - else: - self._status_box.setCurrentCharFormat(self.error_console_style) - self._status_box.appendPlainText(msg) + self._console_widget.update_console(msg, error) + + def _show_console(self): + self._console_widget.show_console() + self.updateGeometry() def _disable_buttons(self): """Disable buttons so user interaction doesn't interfere.""" - self._btn_select.setEnabled(False) - self.run_button.setEnabled(False) self._exit_button.setEnabled(False) - self.install_button.setEnabled(False) + self._run_button.setEnabled(False) self._controls_disabled = True def _enable_buttons(self): """Enable buttons after operation is complete.""" - self._btn_select.setEnabled(True) - self.run_button.setEnabled(True) self._exit_button.setEnabled(True) - self.install_button.setEnabled(True) + self._run_button.setEnabled(True) self._controls_disabled = False def closeEvent(self, event): # noqa @@ -552,212 +525,6 @@ class InstallDialog(QtWidgets.QDialog): return super(InstallDialog, self).closeEvent(event) -class MongoValidator(QValidator): - """Validate mongodb url for Qt widgets.""" - - def __init__(self, parent=None, intermediate=False): - self.parent = parent - self.intermediate = intermediate - self._validate_lock = False - self.timer = QTimer() - self.timer.timeout.connect(self._unlock_validator) - super().__init__(parent) - - def _unlock_validator(self): - self._validate_lock = False - - def _return_state( - self, state: QValidator.State, reason: str, mongo: str): - """Set stylesheets and actions on parent based on state. - - Warning: - This will always return `QValidator.State.Acceptable` as - anything different will stop input to `QLineEdit` - - """ - - if state == QValidator.State.Invalid: - self.parent.setToolTip(reason) - self.parent.setStyleSheet( - """ - background-color: rgb(32, 19, 19); - color: rgb(255, 69, 0); - padding: 0.5em; - border: 1px solid rgb(64, 32, 32); - """ - ) - elif state == QValidator.State.Intermediate and self.intermediate: - self.parent.setToolTip(reason) - self.parent.setStyleSheet( - """ - background-color: rgb(32, 32, 19); - color: rgb(255, 190, 15); - padding: 0.5em; - border: 1px solid rgb(64, 64, 32); - """ - ) - else: - self.parent.setToolTip(reason) - self.parent.setStyleSheet( - """ - background-color: rgb(19, 19, 19); - color: rgb(64, 230, 132); - padding: 0.5em; - border: 1px solid rgb(32, 64, 32); - """ - ) - - return QValidator.State.Acceptable, mongo, len(mongo) - - def validate(self, mongo: str, pos: int) -> (QValidator.State, str, int): # noqa - """Validate entered mongodb connection string. 
- - As url (it should start with `mongodb://` or - `mongodb+srv:// url schema. - - Args: - mongo (str): connection string url. - pos (int): current position. - - Returns: - (QValidator.State.Acceptable, str, int): - Indicate input state with color and always return - Acceptable state as we need to be able to edit input further. - - """ - if not mongo.startswith("mongodb"): - return self._return_state( - QValidator.State.Invalid, "need mongodb schema", mongo) - - return self._return_state( - QValidator.State.Intermediate, "", mongo) - - -class PathValidator(MongoValidator): - """Validate mongodb url for Qt widgets.""" - - def validate(self, path: str, pos: int) -> (QValidator.State, str, int): # noqa - """Validate path to be accepted by Igniter. - - Args: - path (str): path to OpenPype. - pos (int): current position. - - Returns: - (QValidator.State.Acceptable, str, int): - Indicate input state with color and always return - Acceptable state as we need to be able to edit input further. - - """ - # allow empty path as that will use current version coming with - # OpenPype Igniter - if len(path) == 0: - return self._return_state( - QValidator.State.Acceptable, "Use version with Igniter", path) - - if len(path) > 3: - valid, reason = validate_path_string(path) - if not valid: - return self._return_state( - QValidator.State.Invalid, reason, path) - else: - return self._return_state( - QValidator.State.Acceptable, reason, path) - - -class CollapsibleWidget(QtWidgets.QWidget): - """Collapsible widget to hide mongo url in necessary.""" - - def __init__(self, parent=None, title: str = "", animation: int = 300): - self._mainLayout = QtWidgets.QGridLayout(parent) - self._toggleButton = QtWidgets.QToolButton(parent) - self._headerLine = QtWidgets.QFrame(parent) - self._toggleAnimation = QtCore.QParallelAnimationGroup(parent) - self._contentArea = QtWidgets.QScrollArea(parent) - self._animation = animation - self._title = title - super(CollapsibleWidget, self).__init__(parent) - self._init_ui() - - def _init_ui(self): - self._toggleButton.setStyleSheet( - """QToolButton { - border: none; - } - """) - self._toggleButton.setToolButtonStyle( - QtCore.Qt.ToolButtonTextBesideIcon) - - self._toggleButton.setArrowType(QtCore.Qt.ArrowType.RightArrow) - self._toggleButton.setText(self._title) - self._toggleButton.setCheckable(True) - self._toggleButton.setChecked(False) - - self._headerLine.setFrameShape(QtWidgets.QFrame.HLine) - self._headerLine.setFrameShadow(QtWidgets.QFrame.Sunken) - self._headerLine.setSizePolicy(QtWidgets.QSizePolicy.Expanding, - QtWidgets.QSizePolicy.Maximum) - - self._contentArea.setStyleSheet( - """QScrollArea { - background-color: rgb(32, 32, 32); - border: none; - } - """) - self._contentArea.setSizePolicy(QtWidgets.QSizePolicy.Expanding, - QtWidgets.QSizePolicy.Fixed) - self._contentArea.setMaximumHeight(0) - self._contentArea.setMinimumHeight(0) - - self._toggleAnimation.addAnimation( - QtCore.QPropertyAnimation(self, b"minimumHeight")) - self._toggleAnimation.addAnimation( - QtCore.QPropertyAnimation(self, b"maximumHeight")) - self._toggleAnimation.addAnimation( - QtCore.QPropertyAnimation(self._contentArea, b"maximumHeight")) - - self._mainLayout.setVerticalSpacing(0) - self._mainLayout.setContentsMargins(0, 0, 0, 0) - - row = 0 - - self._mainLayout.addWidget( - self._toggleButton, row, 0, 1, 1, QtCore.Qt.AlignCenter) - self._mainLayout.addWidget( - self._headerLine, row, 2, 1, 1) - row += row - self._mainLayout.addWidget(self._contentArea, row, 0, 1, 3) - 
self.setLayout(self._mainLayout) - - self._toggleButton.toggled.connect(self._toggle_action) - - def _toggle_action(self, collapsed: bool): - arrow = QtCore.Qt.ArrowType.DownArrow if collapsed else QtCore.Qt.ArrowType.RightArrow # noqa: E501 - direction = QtCore.QAbstractAnimation.Forward if collapsed else QtCore.QAbstractAnimation.Backward # noqa: E501 - self._toggleButton.setArrowType(arrow) - self._toggleAnimation.setDirection(direction) - self._toggleAnimation.start() - - def setContentLayout(self, content_layout: QtWidgets.QLayout): # noqa - self._contentArea.setLayout(content_layout) - collapsed_height = \ - self.sizeHint().height() - self._contentArea.maximumHeight() - content_height = self._contentArea.sizeHint().height() - - for i in range(self._toggleAnimation.animationCount() - 1): - sec_anim = self._toggleAnimation.animationAt(i) - sec_anim.setDuration(self._animation) - sec_anim.setStartValue(collapsed_height) - sec_anim.setEndValue(collapsed_height + content_height) - - con_anim = self._toggleAnimation.animationAt( - self._toggleAnimation.animationCount() - 1) - - con_anim.setDuration(self._animation) - con_anim.setStartValue(0) - con_anim.setEndValue(collapsed_height + content_height) - - if __name__ == "__main__": app = QtWidgets.QApplication(sys.argv) d = InstallDialog() diff --git a/igniter/install_thread.py b/igniter/install_thread.py index bf5d541056..383012b88b 100644 --- a/igniter/install_thread.py +++ b/igniter/install_thread.py @@ -17,12 +17,6 @@ from .bootstrap_repos import ( from .tools import validate_mongo_connection -class InstallResult(QObject): - """Used to pass results back.""" - def __init__(self, value): - self.status = value - - class InstallThread(QThread): """Install Worker thread. @@ -36,15 +30,22 @@ class InstallThread(QThread): """ progress = Signal(int) message = Signal((str, bool)) - finished = Signal(object) - def __init__(self, callback, parent=None,): + def __init__(self, parent=None,): self._mongo = None self._path = None - self.result_callback = callback + self._result = None QThread.__init__(self, parent) - self.finished.connect(callback) + + def result(self): + """Result of finished installation.""" + return self._result + + def _set_result(self, value): + if self._result is not None: + raise AssertionError("BUG: Result was set more than once!") + self._result = value def run(self): """Thread entry point. @@ -71,18 +72,18 @@ class InstallThread(QThread): if not os.getenv("OPENPYPE_MONGO"): # try to get it from settings registry try: - self._mongo = bs.registry.get_secure_item( + self._mongo = bs.secure_registry.get_item( "openPypeMongo") except ValueError: self.message.emit( "!!! 
We need MongoDB URL to proceed.", True) - self.finished.emit(InstallResult(-1)) + self._set_result(-1) return else: self._mongo = os.getenv("OPENPYPE_MONGO") else: self.message.emit("Saving mongo connection string ...", False) - bs.registry.set_secure_item("openPypeMongo", self._mongo) + bs.secure_registry.set_item("openPypeMongo", self._mongo) os.environ["OPENPYPE_MONGO"] = self._mongo @@ -101,7 +102,7 @@ class InstallThread(QThread): self.message.emit("Skipping OpenPype install ...", False) if detected[-1].path.suffix.lower() == ".zip": bs.extract_openpype(detected[-1]) - self.finished.emit(InstallResult(0)) + self._set_result(0) return if OpenPypeVersion(version=local_version).get_main_version() == detected[-1].get_main_version(): # noqa @@ -110,7 +111,7 @@ class InstallThread(QThread): f"currently running {local_version}" ), False) self.message.emit("Skipping OpenPype install ...", False) - self.finished.emit(InstallResult(0)) + self._set_result(0) return self.message.emit(( @@ -126,13 +127,13 @@ class InstallThread(QThread): if not openpype_version: self.message.emit( f"!!! Install failed - {openpype_version}", True) - self.finished.emit(InstallResult(-1)) + self._set_result(-1) return self.message.emit(f"Using: {openpype_version}", False) bs.install_version(openpype_version) self.message.emit(f"Installed as {openpype_version}", False) self.progress.emit(100) - self.finished.emit(InstallResult(1)) + self._set_result(1) return else: self.message.emit("None detected.", False) @@ -144,7 +145,7 @@ class InstallThread(QThread): if not local_openpype: self.message.emit( f"!!! Install failed - {local_openpype}", True) - self.finished.emit(InstallResult(-1)) + self._set_result(-1) return try: @@ -154,11 +155,12 @@ class InstallThread(QThread): OpenPypeVersionIOError) as e: self.message.emit(f"Installed failed: ", True) self.message.emit(str(e), True) - self.finished.emit(InstallResult(-1)) + self._set_result(-1) return self.message.emit(f"Installed as {local_openpype}", False) self.progress.emit(100) + self._set_result(1) return else: # if we have mongo connection string, validate it, set it to @@ -167,9 +169,9 @@ class InstallThread(QThread): if not validate_mongo_connection(self._mongo): self.message.emit( f"!!! invalid mongo url {self._mongo}", True) - self.finished.emit(InstallResult(-1)) + self._set_result(-1) return - bs.registry.set_secure_item("openPypeMongo", self._mongo) + bs.secure_registry.set_item("openPypeMongo", self._mongo) os.environ["OPENPYPE_MONGO"] = self._mongo self.message.emit(f"processing {self._path}", True) @@ -177,11 +179,11 @@ class InstallThread(QThread): if not repo_file: self.message.emit("!!! 
Cannot install", True) - self.finished.emit(InstallResult(-1)) + self._set_result(-1) return self.progress.emit(100) - self.finished.emit(InstallResult(1)) + self._set_result(1) return def set_path(self, path: str) -> None: diff --git a/igniter/openpype.icns b/igniter/openpype.icns new file mode 100644 index 0000000000..792f819ad9 Binary files /dev/null and b/igniter/openpype.icns differ diff --git a/igniter/stylesheet.css b/igniter/stylesheet.css new file mode 100644 index 0000000000..8df2621d83 --- /dev/null +++ b/igniter/stylesheet.css @@ -0,0 +1,280 @@ +*{ + font-size: 10pt; + font-family: "Poppins"; +} + +QWidget { + color: #bfccd6; + background-color: #282C34; + border-radius: 0px; +} + +QMenu { + border: 1px solid #555555; + background-color: #21252B; +} + +QMenu::item { + padding: 5px 10px 5px 10px; + border-left: 5px solid #313741;; +} + +QMenu::item:selected { + border-left-color: rgb(84, 209, 178); + background-color: #222d37; +} + +QLineEdit, QPlainTextEdit { + border: 1px solid #464b54; + border-radius: 3px; + background-color: #21252B; + padding: 0.5em; +} + +QLineEdit[state="valid"] { + background-color: rgb(19, 19, 19); + color: rgb(64, 230, 132); + border-color: rgb(32, 64, 32); +} + +QLineEdit[state="invalid"] { + background-color: rgb(32, 19, 19); + color: rgb(255, 69, 0); + border-color: rgb(64, 32, 32); +} + +QLabel { + background: transparent; + color: #969b9e; +} + +QLabel:hover {color: #b8c1c5;} + +QPushButton { + border: 1px solid #aaaaaa; + border-radius: 3px; + padding: 5px; +} + +QPushButton:hover { + background-color: #333840; + border: 1px solid #fff; + color: #fff; +} + +QTableView { + border: 1px solid #444; + gridline-color: #6c6c6c; + background-color: #201F1F; + alternate-background-color:#21252B; +} + +QTableView::item:pressed, QListView::item:pressed, QTreeView::item:pressed { + background: #78879b; + color: #FFFFFF; +} + +QTableView::item:selected:active, QTreeView::item:selected:active, QListView::item:selected:active { + background: #3d8ec9; +} + +QProgressBar { + border: 1px solid grey; + border-radius: 10px; + color: #222222; + font-weight: bold; +} +QProgressBar:horizontal { + height: 20px; +} + +QProgressBar::chunk { + border-radius: 10px; + background-color: qlineargradient( + x1: 0, + y1: 0.5, + x2: 1, + y2: 0.5, + stop: 0 rgb(72, 200, 150), + stop: 1 rgb(82, 172, 215) + ); +} + + +QScrollBar:horizontal { + height: 15px; + margin: 3px 15px 3px 15px; + border: 1px transparent #21252B; + border-radius: 4px; + background-color: #21252B; +} + +QScrollBar::handle:horizontal { + background-color: #4B5362; + min-width: 5px; + border-radius: 4px; +} + +QScrollBar::add-line:horizontal { + margin: 0px 3px 0px 3px; + border-image: url(:/qss_icons/rc/right_arrow_disabled.png); + width: 10px; + height: 10px; + subcontrol-position: right; + subcontrol-origin: margin; +} + +QScrollBar::sub-line:horizontal { + margin: 0px 3px 0px 3px; + border-image: url(:/qss_icons/rc/left_arrow_disabled.png); + height: 10px; + width: 10px; + subcontrol-position: left; + subcontrol-origin: margin; +} + +QScrollBar::add-line:horizontal:hover,QScrollBar::add-line:horizontal:on { + border-image: url(:/qss_icons/rc/right_arrow.png); + height: 10px; + width: 10px; + subcontrol-position: right; + subcontrol-origin: margin; +} + +QScrollBar::sub-line:horizontal:hover, QScrollBar::sub-line:horizontal:on { + border-image: url(:/qss_icons/rc/left_arrow.png); + height: 10px; + width: 10px; + subcontrol-position: left; + subcontrol-origin: margin; +} + +QScrollBar::up-arrow:horizontal, 
QScrollBar::down-arrow:horizontal { + background: none; +} + +QScrollBar::add-page:horizontal, QScrollBar::sub-page:horizontal { + background: none; +} + +QScrollBar:vertical { + background-color: #21252B; + width: 15px; + margin: 15px 3px 15px 3px; + border: 1px transparent #21252B; + border-radius: 4px; +} + +QScrollBar::handle:vertical { + background-color: #4B5362; + min-height: 5px; + border-radius: 4px; +} + +QScrollBar::sub-line:vertical { + margin: 3px 0px 3px 0px; + border-image: url(:/qss_icons/rc/up_arrow_disabled.png); + height: 10px; + width: 10px; + subcontrol-position: top; + subcontrol-origin: margin; +} + +QScrollBar::add-line:vertical { + margin: 3px 0px 3px 0px; + border-image: url(:/qss_icons/rc/down_arrow_disabled.png); + height: 10px; + width: 10px; + subcontrol-position: bottom; + subcontrol-origin: margin; +} + +QScrollBar::sub-line:vertical:hover,QScrollBar::sub-line:vertical:on { + + border-image: url(:/qss_icons/rc/up_arrow.png); + height: 10px; + width: 10px; + subcontrol-position: top; + subcontrol-origin: margin; +} + + +QScrollBar::add-line:vertical:hover, QScrollBar::add-line:vertical:on { + border-image: url(:/qss_icons/rc/down_arrow.png); + height: 10px; + width: 10px; + subcontrol-position: bottom; + subcontrol-origin: margin; +} + +QScrollBar::up-arrow:vertical, QScrollBar::down-arrow:vertical { + background: none; +} + + +QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical { + background: none; +} + +#MainLabel { + color: rgb(200, 200, 200); + font-size: 12pt; +} + +#Console { + background-color: #21252B; + color: rgb(72, 200, 150); + font-family: "Roboto Mono"; + font-size: 8pt; +} + +#ExitBtn { + /* `border` must be set to background of flat button is painted .*/ + border: none; + color: rgb(39, 39, 39); + background-color: #828a97; + padding: 0.5em; + font-weight: 400; +} + +#ExitBtn:hover{ + background-color: #b2bece +} +#ExitBtn:disabled { + background-color: rgba(185, 185, 185, 31); + color: rgba(64, 64, 64, 63); +} + +#ButtonWithOptions QPushButton{ + border-top-right-radius: 0px; + border-bottom-right-radius: 0px; + border: none; + background-color: rgb(84, 209, 178); + color: rgb(39, 39, 39); + font-weight: 400; + padding: 0.5em; +} +#ButtonWithOptions QPushButton:hover{ + background-color: rgb(85, 224, 189) +} +#ButtonWithOptions QPushButton:disabled { + background-color: rgba(72, 200, 150, 31); + color: rgba(64, 64, 64, 63); +} + +#ButtonWithOptions QToolButton{ + border: none; + border-top-left-radius: 0px; + border-bottom-left-radius: 0px; + border-top-right-radius: 3px; + border-bottom-right-radius: 3px; + background-color: rgb(84, 209, 178); + color: rgb(39, 39, 39); +} +#ButtonWithOptions QToolButton:hover{ + background-color: rgb(85, 224, 189) +} +#ButtonWithOptions QToolButton:disabled { + background-color: rgba(72, 200, 150, 31); + color: rgba(64, 64, 64, 63); +} diff --git a/igniter/tools.py b/igniter/tools.py index ff2db6bc7e..529d535c25 100644 --- a/igniter/tools.py +++ b/igniter/tools.py @@ -14,7 +14,12 @@ from pathlib import Path import platform from pymongo import MongoClient -from pymongo.errors import ServerSelectionTimeoutError, InvalidURI +from pymongo.errors import ( + ServerSelectionTimeoutError, + InvalidURI, + ConfigurationError, + OperationFailure +) def decompose_url(url: str) -> Dict: @@ -115,30 +120,20 @@ def validate_mongo_connection(cnx: str) -> (bool, str): parsed = urlparse(cnx) if parsed.scheme not in ["mongodb", "mongodb+srv"]: return False, "Not mongodb schema" - # we have mongo connection string. 
Let's try if we can connect. - try: - components = decompose_url(cnx) - except RuntimeError: - return False, f"Invalid port specified." - - mongo_args = { - "host": compose_url(**components), - "serverSelectionTimeoutMS": 2000 - } - port = components.get("port") - if port is not None: - mongo_args["port"] = int(port) try: - client = MongoClient(**mongo_args) + client = MongoClient( + cnx, + serverSelectionTimeoutMS=2000 + ) client.server_info() client.close() except ServerSelectionTimeoutError as e: return False, f"Cannot connect to server {cnx} - {e}" except ValueError: return False, f"Invalid port specified {parsed.port}" - except InvalidURI as e: - return False, str(e) + except (ConfigurationError, OperationFailure, InvalidURI) as exc: + return False, str(exc) else: return True, "Connection is successful" diff --git a/igniter/user_settings.py b/igniter/user_settings.py index 77fb8b5ae5..2a406f83dd 100644 --- a/igniter/user_settings.py +++ b/igniter/user_settings.py @@ -25,8 +25,112 @@ except ImportError: import platform -import appdirs import six +import appdirs + +_PLACEHOLDER = object() + + +class OpenPypeSecureRegistry: + """Store information using keyring. + + Registry should be used for private data that should be available only for + user. + + All passed registry names will have added prefix `OpenPype/` to easier + identify which data were created by OpenPype. + + Args: + name(str): Name of registry used as identifier for data. + """ + def __init__(self, name): + try: + import keyring + + except Exception: + raise NotImplementedError( + "Python module `keyring` is not available." + ) + + # hack for cx_freeze and Windows keyring backend + if platform.system().lower() == "windows": + from keyring.backends import Windows + + keyring.set_keyring(Windows.WinVaultKeyring()) + + # Force "OpenPype" prefix + self._name = "/".join(("OpenPype", name)) + + def set_item(self, name, value): + # type: (str, str) -> None + """Set sensitive item into system's keyring. + + This uses `Keyring module`_ to save sensitive stuff into system's + keyring. + + Args: + name (str): Name of the item. + value (str): Value of the item. + + .. _Keyring module: + https://github.com/jaraco/keyring + + """ + import keyring + + keyring.set_password(self._name, name, value) + + @lru_cache(maxsize=32) + def get_item(self, name, default=_PLACEHOLDER): + """Get value of sensitive item from system's keyring. + + See also `Keyring module`_ + + Args: + name (str): Name of the item. + default (Any): Default value if item is not available. + + Returns: + value (str): Value of the item. + + Raises: + ValueError: If item doesn't exist and default is not defined. + + .. _Keyring module: + https://github.com/jaraco/keyring + + """ + import keyring + + value = keyring.get_password(self._name, name) + if value: + return value + + if default is not _PLACEHOLDER: + return default + + # NOTE Should raise `KeyError` + raise ValueError( + "Item {}:{} does not exist in keyring.".format(self._name, name) + ) + + def delete_item(self, name): + # type: (str) -> None + """Delete value stored in system's keyring. + + See also `Keyring module`_ + + Args: + name (str): Name of the item to be deleted. + + .. 
_Keyring module: + https://github.com/jaraco/keyring + + """ + import keyring + + self.get_item.cache_clear() + keyring.delete_password(self._name, name) @six.add_metaclass(ABCMeta) @@ -46,13 +150,6 @@ class ASettingRegistry(): # type: (str) -> ASettingRegistry super(ASettingRegistry, self).__init__() - if six.PY3: - import keyring - # hack for cx_freeze and Windows keyring backend - if platform.system() == "Windows": - from keyring.backends import Windows - keyring.set_keyring(Windows.WinVaultKeyring()) - self._name = name self._items = {} @@ -127,78 +224,6 @@ class ASettingRegistry(): del self._items[name] self._delete_item(name) - def set_secure_item(self, name, value): - # type: (str, str) -> None - """Set sensitive item into system's keyring. - - This uses `Keyring module`_ to save sensitive stuff into system's - keyring. - - Args: - name (str): Name of the item. - value (str): Value of the item. - - .. _Keyring module: - https://github.com/jaraco/keyring - - """ - if six.PY2: - raise NotImplementedError( - "Keyring not available on Python 2 hosts") - import keyring - keyring.set_password(self._name, name, value) - - @lru_cache(maxsize=32) - def get_secure_item(self, name): - # type: (str) -> str - """Get value of sensitive item from system's keyring. - - See also `Keyring module`_ - - Args: - name (str): Name of the item. - - Returns: - value (str): Value of the item. - - Raises: - ValueError: If item doesn't exist. - - .. _Keyring module: - https://github.com/jaraco/keyring - - """ - if six.PY2: - raise NotImplementedError( - "Keyring not available on Python 2 hosts") - import keyring - value = keyring.get_password(self._name, name) - if not value: - raise ValueError( - "Item {}:{} does not exist in keyring.".format( - self._name, name)) - return value - - def delete_secure_item(self, name): - # type: (str) -> None - """Delete value stored in system's keyring. - - See also `Keyring module`_ - - Args: - name (str): Name of the item to be deleted. - - .. _Keyring module: - https://github.com/jaraco/keyring - - """ - if six.PY2: - raise NotImplementedError( - "Keyring not available on Python 2 hosts") - import keyring - self.get_secure_item.cache_clear() - keyring.delete_password(self._name, name) - class IniSettingRegistry(ASettingRegistry): """Class using :mod:`configparser`. @@ -459,9 +484,10 @@ class OpenPypeSettingsRegistry(JSONSettingRegistry): """ - def __init__(self): + def __init__(self, name=None): self.vendor = "pypeclub" self.product = "openpype" + if not name: + name = "openpype_settings" path = appdirs.user_data_dir(self.product, self.vendor) - super(OpenPypeSettingsRegistry, self).__init__( - "openpype_settings", path) + super(OpenPypeSettingsRegistry, self).__init__(name, path) diff --git a/inno_setup.iss b/inno_setup.iss new file mode 100644 index 0000000000..ead9907955 --- /dev/null +++ b/inno_setup.iss @@ -0,0 +1,50 @@ +; Script generated by the Inno Setup Script Wizard. +; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES! + + +#define MyAppName "OpenPype" +#define Build GetEnv("BUILD_DIR") +#define AppVer GetEnv("BUILD_VERSION") + + +[Setup] +; NOTE: The value of AppId uniquely identifies this application. Do not use the same AppId value in installers for other applications. +; (To generate a new GUID, click Tools | Generate GUID inside the IDE.) 
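+; NOTE: BUILD_DIR and BUILD_VERSION are read with GetEnv() in the defines
+; above, so both environment variables must be set before this script is
+; compiled (e.g. exported by the build pipeline).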
+AppId={{B9E9DF6A-5BDA-42DD-9F35-C09D564C4D93} +AppName={#MyAppName} +AppVersion={#AppVer} +AppVerName={#MyAppName} version {#AppVer} +AppPublisher=Orbi Tools s.r.o +AppPublisherURL=http://pype.club +AppSupportURL=http://pype.club +AppUpdatesURL=http://pype.club +DefaultDirName={autopf}\{#MyAppName} +DisableProgramGroupPage=yes +OutputBaseFilename={#MyAppName}-{#AppVer}-install +AllowCancelDuringInstall=yes +; Uncomment the following line to run in non administrative install mode (install for current user only.) +;PrivilegesRequired=lowest +PrivilegesRequiredOverridesAllowed=dialog +SetupIconFile=igniter\openpype.ico +OutputDir=build\ +Compression=lzma +SolidCompression=yes +WizardStyle=modern + +[Languages] +Name: "english"; MessagesFile: "compiler:Default.isl" + +[Tasks] +Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked + +[Files] +Source: "build\{#build}\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs +; NOTE: Don't use "Flags: ignoreversion" on any shared system files + +[Icons] +Name: "{autoprograms}\{#MyAppName}"; Filename: "{app}\openpype_gui.exe" +Name: "{autodesktop}\{#MyAppName}"; Filename: "{app}\openpype_gui.exe"; Tasks: desktopicon + +[Run] +Filename: "{app}\openpype_gui.exe"; Description: "{cm:LaunchProgram,OpenPype}"; Flags: nowait postinstall skipifsilent + diff --git a/openpype/__init__.py b/openpype/__init__.py index edd48a018d..f63d534e08 100644 --- a/openpype/__init__.py +++ b/openpype/__init__.py @@ -9,6 +9,7 @@ from .settings import get_project_settings from .lib import ( Anatomy, filter_pyblish_plugins, + set_plugin_attributes_from_settings, change_timer_to_current_context ) @@ -58,38 +59,8 @@ def patched_discover(superclass): # run original discover and get plugins plugins = _original_discover(superclass) - # determine host application to use for finding presets - if avalon.registered_host() is None: - return plugins - host = avalon.registered_host().__name__.split(".")[-1] + set_plugin_attributes_from_settings(plugins, superclass) - # map plugin superclass to preset json. 
Currenly suppoted is load and
-    # create (avalon.api.Loader and avalon.api.Creator)
-    plugin_type = "undefined"
-    if superclass.__name__.split(".")[-1] == "Loader":
-        plugin_type = "load"
-    elif superclass.__name__.split(".")[-1] == "Creator":
-        plugin_type = "create"
-
-    print(">>> Finding presets for {}:{} ...".format(host, plugin_type))
-    try:
-        settings = (
-            get_project_settings(os.environ['AVALON_PROJECT'])
-            [host][plugin_type]
-        )
-    except KeyError:
-        print("*** no presets found.")
-    else:
-        for plugin in plugins:
-            if plugin.__name__ in settings:
-                print(">>> We have preset for {}".format(plugin.__name__))
-                for option, value in settings[plugin.__name__].items():
-                    if option == "enabled" and value is False:
-                        setattr(plugin, "active", False)
-                        print("  - is disabled by preset")
-                    else:
-                        setattr(plugin, option, value)
-                        print("  - setting `{}`: `{}`".format(option, value))
     return plugins
diff --git a/openpype/hooks/pre_python2_vendor.py b/openpype/hooks/pre_python_2_prelaunch.py
similarity index 83%
rename from openpype/hooks/pre_python2_vendor.py
rename to openpype/hooks/pre_python_2_prelaunch.py
index 7aaf713dec..84272d2e5d 100644
--- a/openpype/hooks/pre_python2_vendor.py
+++ b/openpype/hooks/pre_python_2_prelaunch.py
@@ -4,14 +4,15 @@ from openpype.lib import PreLaunchHook

 class PrePython2Vendor(PreLaunchHook):
     """Prepend python 2 dependencies for py2 hosts."""
-    # WARNING This hook will probably be deprecated in OpenPype 3 - kept for test
     order = 10
-    app_groups = ["hiero", "nuke", "nukex"]

     def execute(self):
+        if not self.application.use_python_2:
+            return
+
         # Prepare vendor dir path
         self.log.info("adding global python 2 vendor")
-        pype_root = os.getenv("OPENPYPE_ROOT")
+        pype_root = os.getenv("OPENPYPE_REPOS_ROOT")
         python_2_vendor = os.path.join(
             pype_root,
             "openpype",
diff --git a/openpype/hooks/pre_with_windows_shell.py b/openpype/hooks/pre_with_windows_shell.py
index 5f0f03f13e..0c10583b99 100644
--- a/openpype/hooks/pre_with_windows_shell.py
+++ b/openpype/hooks/pre_with_windows_shell.py
@@ -1,4 +1,5 @@
 import os
+import subprocess

 from openpype.lib import PreLaunchHook

@@ -10,15 +11,32 @@ class LaunchWithWindowsShell(PreLaunchHook):
     instead.
     """

-    order = 10
-    app_groups = ["resolve", "nuke", "nukex", "hiero", "nukestudio"]
+    # Should run as the last hook because it must change launch arguments
+    # to a string
+    order = 1000
+    app_groups = ["nuke", "nukex", "hiero", "nukestudio"]
     platforms = ["windows"]

     def execute(self):
-        # Get comspec which is cmd.exe in most cases.
-        comspec = os.environ.get("COMSPEC", "cmd.exe")
-        # Add comspec to arguments list and add "/k"
-        new_args = [comspec, "/c"]
-        new_args.extend(self.launch_context.launch_args)
+        new_args = [
+            # Get comspec which is cmd.exe in most cases.
+            os.environ.get("COMSPEC", "cmd.exe"),
+            # NOTE change to "/k" if you want to keep the console open
+            "/c",
+            # Convert arguments to command line arguments (as string)
+            "\"{}\"".format(
+                subprocess.list2cmdline(self.launch_context.launch_args)
+            )
+        ]
+        # Convert list to string
+        # WARNING this only works if it is used as a string
+        args_string = " ".join(new_args)
+        self.log.info((
+            "Modified launch arguments to be launched with shell \"{}\"."
+        ).format(args_string))
+
         # Replace launch args with new one
-        self.launch_context.launch_args = new_args
+        self.launch_context.launch_args = args_string
+        # Change `creationflags` to CREATE_NEW_CONSOLE
+        self.launch_context.kwargs["creationflags"] = (
+            subprocess.CREATE_NEW_CONSOLE
+        )
diff --git a/openpype/hosts/aftereffects/api/__init__.py b/openpype/hosts/aftereffects/api/__init__.py
index ee73a0f52b..e914c26435 100644
--- a/openpype/hosts/aftereffects/api/__init__.py
+++ b/openpype/hosts/aftereffects/api/__init__.py
@@ -5,12 +5,12 @@ import logging
 from avalon import io
 from avalon import api as avalon
 from avalon.vendor import Qt
-from openpype import lib
+from openpype import lib, api
 import pyblish.api as pyblish
 import openpype.hosts.aftereffects

-log = logging.getLogger("pype.hosts.aftereffects")
+log = logging.getLogger("openpype.hosts.aftereffects")

 HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.aftereffects.__file__))
@@ -81,3 +81,69 @@ def uninstall():
 def on_pyblish_instance_toggled(instance, old_value, new_value):
     """Toggle layer visibility on instance toggles."""
     instance[0].Visible = new_value
+
+
+def get_asset_settings():
+    """Get settings on current asset from database.
+
+    Returns:
+        dict: Scene data.
+
+    """
+    asset_data = lib.get_asset()["data"]
+    fps = asset_data.get("fps")
+    frame_start = asset_data.get("frameStart")
+    frame_end = asset_data.get("frameEnd")
+    handle_start = asset_data.get("handleStart")
+    handle_end = asset_data.get("handleEnd")
+    resolution_width = asset_data.get("resolutionWidth")
+    resolution_height = asset_data.get("resolutionHeight")
+    duration = (frame_end - frame_start + 1) + handle_start + handle_end
+    entity_type = asset_data.get("entityType")
+
+    scene_data = {
+        "fps": fps,
+        "frameStart": frame_start,
+        "frameEnd": frame_end,
+        "handleStart": handle_start,
+        "handleEnd": handle_end,
+        "resolutionWidth": resolution_width,
+        "resolutionHeight": resolution_height,
+        "duration": duration
+    }
+
+    try:
+        # temporary, in pype3 replace with api.get_current_project_settings
+        skip_resolution_check = (
+            api.get_current_project_settings()
+            ["plugins"]
+            ["aftereffects"]
+            ["publish"]
+            ["ValidateSceneSettings"]
+            ["skip_resolution_check"]
+        )
+        skip_timelines_check = (
+            api.get_current_project_settings()
+            ["plugins"]
+            ["aftereffects"]
+            ["publish"]
+            ["ValidateSceneSettings"]
+            ["skip_timelines_check"]
+        )
+    except KeyError:
+        skip_resolution_check = ['*']
+        skip_timelines_check = ['*']
+
+    if os.getenv('AVALON_TASK') in skip_resolution_check or \
+            '*' in skip_resolution_check:
+        scene_data.pop("resolutionWidth")
+        scene_data.pop("resolutionHeight")
+
+    if entity_type in skip_timelines_check or '*' in skip_timelines_check:
+        scene_data.pop('fps', None)
+        scene_data.pop('frameStart', None)
+        scene_data.pop('frameEnd', None)
+        scene_data.pop('handleStart', None)
+        scene_data.pop('handleEnd', None)
+
+    return scene_data
diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py
index 4a124991fd..baac64ed0c 100644
--- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py
+++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py
@@ -12,6 +12,7 @@ class AERenderInstance(RenderInstance):
     # extend generic, composition name is needed
     comp_name = attr.ib(default=None)
     comp_id = attr.ib(default=None)
+    fps = attr.ib(default=None)


 class CollectAERender(abstract_collect_render.AbstractCollectRender):

@@ -45,12 +46,21 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):
             raise ValueError("Couldn't find id, unable to publish. " +
                              "Please recreate instance.")
         item_id = inst["members"][0]
+        work_area_info = self.stub.get_work_area(int(item_id))
+
+        if not work_area_info:
+            self.log.warning("Orphaned instance, deleting metadata")
+            self.stub.remove_instance(int(item_id))
+            continue
+
         frameStart = work_area_info.workAreaStart
         frameEnd = round(work_area_info.workAreaStart +
                          float(work_area_info.workAreaDuration) *
                          float(work_area_info.frameRate)) - 1
+        fps = work_area_info.frameRate
+
+        # TODO add resolution when supported by extension
         if inst["family"] == "render" and inst["active"]:
             instance = AERenderInstance(
@@ -80,7 +90,8 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):
                 frameStart=frameStart,
                 frameEnd=frameEnd,
                 frameStep=1,
-                toBeRenderedOn='deadline'
+                toBeRenderedOn='deadline',
+                fps=fps
             )

             comp = compositions_by_id.get(int(item_id))
@@ -96,7 +107,6 @@ class CollectAERender(abstract_collect_render.AbstractCollectRender):

             instances.append(instance)

-        self.log.debug("instances::{}".format(instances))
         return instances

     def get_expected_files(self, render_instance):
diff --git a/openpype/hosts/aftereffects/plugins/publish/remove_publish_highlight.py b/openpype/hosts/aftereffects/plugins/publish/remove_publish_highlight.py
index 291f22e3b8..d2b3fd1b12 100644
--- a/openpype/hosts/aftereffects/plugins/publish/remove_publish_highlight.py
+++ b/openpype/hosts/aftereffects/plugins/publish/remove_publish_highlight.py
@@ -21,3 +21,4 @@ class RemovePublishHighlight(openpype.api.Extractor):
         item = instance.data
         comp_name = item["comp_name"].replace(stub.PUBLISH_ICON, '')
         stub.rename_item(item["comp_id"], comp_name)
+        instance.data["comp_name"] = comp_name
diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py b/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py
new file mode 100644
index 0000000000..cc7db3141f
--- /dev/null
+++ b/openpype/hosts/aftereffects/plugins/publish/validate_scene_settings.py
@@ -0,0 +1,110 @@
+# -*- coding: utf-8 -*-
+"""Validate scene settings."""
+import os
+
+import pyblish.api
+
+from avalon import aftereffects
+
+import openpype.hosts.aftereffects.api as api
+
+stub = aftereffects.stub()
+
+
+class ValidateSceneSettings(pyblish.api.InstancePlugin):
+    """
+    Ensures that Composition Settings (right mouse on comp) are the same
+    as on the FTrack task.
+
+    By default checks only duration - how many frames should be rendered.
+    Compares:
+        Frame end - Frame start + 1 from FTrack
+    against
+        Duration in Composition Settings.
+
+    If this complains:
+        Check the error message to see where the discrepancy is.
+        Check the 'pype' section of task attributes on the FTrack task
+        for the expected values.
+        Check/modify the rendered Composition Settings.
+
+    If you know what you are doing, run publishing again and uncheck this
+    validation before the Validation phase.
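+
+    Each mismatch is reported in the form "<key> expected: <value>
+    found: <value>", for example (illustrative values only):
+
+        fps expected: 25.0 found: 24.0
+        duration expected: 101 found: 100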
+ """ + + """ + Dev docu: + Could be configured by 'presets/plugins/aftereffects/publish' + + skip_timelines_check - fill task name for which skip validation of + frameStart + frameEnd + fps + handleStart + handleEnd + skip_resolution_check - fill entity type ('asset') to skip validation + resolutionWidth + resolutionHeight + TODO support in extension is missing for now + + By defaults validates duration (how many frames should be published) + """ + + order = pyblish.api.ValidatorOrder + label = "Validate Scene Settings" + families = ["render.farm"] + hosts = ["aftereffects"] + optional = True + + skip_timelines_check = ["*"] # * >> skip for all + skip_resolution_check = ["*"] + + def process(self, instance): + """Plugin entry point.""" + expected_settings = api.get_asset_settings() + self.log.info("expected_settings::{}".format(expected_settings)) + + # handle case where ftrack uses only two decimal places + # 23.976023976023978 vs. 23.98 + fps = instance.data.get("fps") + if fps: + if isinstance(fps, float): + fps = float( + "{:.2f}".format(fps)) + expected_settings["fps"] = fps + + duration = instance.data.get("frameEndHandle") - \ + instance.data.get("frameStartHandle") + 1 + + current_settings = { + "fps": fps, + "frameStartHandle": instance.data.get("frameStartHandle"), + "frameEndHandle": instance.data.get("frameEndHandle"), + "resolutionWidth": instance.data.get("resolutionWidth"), + "resolutionHeight": instance.data.get("resolutionHeight"), + "duration": duration + } + self.log.info("current_settings:: {}".format(current_settings)) + + invalid_settings = [] + for key, value in expected_settings.items(): + if value != current_settings[key]: + invalid_settings.append( + "{} expected: {} found: {}".format(key, value, + current_settings[key]) + ) + + if ((expected_settings.get("handleStart") + or expected_settings.get("handleEnd")) + and invalid_settings): + msg = "Handles included in calculation. Remove handles in DB " +\ + "or extend frame range in Composition Setting." 
+            invalid_settings.append(msg)
+
+        msg = "Found invalid settings:\n{}".format(
+            "\n".join(invalid_settings)
+        )
+        assert not invalid_settings, msg
+        assert os.path.exists(instance.data.get("source")), (
+            "Scene file not found (saved under wrong name)"
+        )
diff --git a/openpype/hosts/blender/api/__init__.py b/openpype/hosts/blender/api/__init__.py
index 55c5b63f60..66102a2ae1 100644
--- a/openpype/hosts/blender/api/__init__.py
+++ b/openpype/hosts/blender/api/__init__.py
@@ -51,8 +51,37 @@ def set_start_end_frames():
         "name": asset_name
     })

-    bpy.context.scene.frame_start = asset_doc["data"]["frameStart"]
-    bpy.context.scene.frame_end = asset_doc["data"]["frameEnd"]
+    scene = bpy.context.scene
+
+    # Default scene settings
+    frameStart = scene.frame_start
+    frameEnd = scene.frame_end
+    fps = scene.render.fps
+    resolution_x = scene.render.resolution_x
+    resolution_y = scene.render.resolution_y
+
+    # Check if settings are set
+    data = asset_doc.get("data")
+
+    if not data:
+        return
+
+    if data.get("frameStart"):
+        frameStart = data.get("frameStart")
+    if data.get("frameEnd"):
+        frameEnd = data.get("frameEnd")
+    if data.get("fps"):
+        fps = data.get("fps")
+    if data.get("resolutionWidth"):
+        resolution_x = data.get("resolutionWidth")
+    if data.get("resolutionHeight"):
+        resolution_y = data.get("resolutionHeight")
+
+    scene.frame_start = frameStart
+    scene.frame_end = frameEnd
+    scene.render.fps = fps
+    scene.render.resolution_x = resolution_x
+    scene.render.resolution_y = resolution_y


 def on_new(arg1, arg2):
diff --git a/openpype/hosts/blender/api/plugin.py b/openpype/hosts/blender/api/plugin.py
index eb88e7af63..de30da3319 100644
--- a/openpype/hosts/blender/api/plugin.py
+++ b/openpype/hosts/blender/api/plugin.py
@@ -9,7 +9,7 @@ from avalon import api
 import avalon.blender
 from openpype.api import PypeCreatorMixin

-VALID_EXTENSIONS = [".blend", ".json"]
+VALID_EXTENSIONS = [".blend", ".json", ".abc"]


 def asset_name(
diff --git a/openpype/hosts/blender/hooks/pre_pyside_install.py b/openpype/hosts/blender/hooks/pre_pyside_install.py
index 088a27566d..6d253300d9 100644
--- a/openpype/hosts/blender/hooks/pre_pyside_install.py
+++ b/openpype/hosts/blender/hooks/pre_pyside_install.py
@@ -1,4 +1,5 @@
 import os
+import re
 import subprocess

 from openpype.lib import PreLaunchHook

@@ -31,10 +32,46 @@ class InstallPySideToBlender(PreLaunchHook):

     def inner_execute(self):
         # Get blender's python directory
+        version_regex = re.compile(r"^2\.[0-9]{2}$")
+
         executable = self.launch_context.executable.executable_path
-        # Blender installation contain subfolder named with it's version where
-        # python binaries are stored.
-        version_subfolder = self.launch_context.app_name.split("_")[1]
+        if os.path.basename(executable).lower() != "blender.exe":
+            self.log.info((
+                "Executable does not lead to blender.exe file. Can't determine"
+                " blender's python to check/install PySide2."
+            ))
+            return
+
+        executable_dir = os.path.dirname(executable)
+        version_subfolders = []
+        for name in os.listdir(executable_dir):
+            fullpath = os.path.join(executable_dir, name)
+            if not os.path.isdir(fullpath):
+                continue
+
+            if not version_regex.match(name):
+                continue
+
+            version_subfolders.append(name)
+
+        if not version_subfolders:
+            self.log.info(
+                "Didn't find version subfolder next to Blender executable"
+            )
+            return
+
+        if len(version_subfolders) > 1:
+            self.log.info((
+                "Found more than one version subfolder next"
+                " to blender executable. 
{}" + ).format(", ".join([ + '"./{}"'.format(name) + for name in version_subfolders + ]))) + return + + version_subfolder = version_subfolders[0] + pythond_dir = os.path.join( os.path.dirname(executable), version_subfolder, @@ -65,6 +102,7 @@ class InstallPySideToBlender(PreLaunchHook): # Check if PySide2 is installed and skip if yes if self.is_pyside_installed(python_executable): + self.log.debug("Blender has already installed PySide2.") return # Install PySide2 in blender's python diff --git a/openpype/hosts/blender/plugins/create/create_pointcache.py b/openpype/hosts/blender/plugins/create/create_pointcache.py new file mode 100644 index 0000000000..03a468f82e --- /dev/null +++ b/openpype/hosts/blender/plugins/create/create_pointcache.py @@ -0,0 +1,35 @@ +"""Create a pointcache asset.""" + +import bpy + +from avalon import api +from avalon.blender import lib +import openpype.hosts.blender.api.plugin + + +class CreatePointcache(openpype.hosts.blender.api.plugin.Creator): + """Polygonal static geometry""" + + name = "pointcacheMain" + label = "Point Cache" + family = "pointcache" + icon = "gears" + + def process(self): + + asset = self.data["asset"] + subset = self.data["subset"] + name = openpype.hosts.blender.api.plugin.asset_name(asset, subset) + collection = bpy.data.collections.new(name=name) + bpy.context.scene.collection.children.link(collection) + self.data['task'] = api.Session.get('AVALON_TASK') + lib.imprint(collection, self.data) + + if (self.options or {}).get("useSelection"): + objects = lib.get_selection() + for obj in objects: + collection.objects.link(obj) + if obj.type == 'EMPTY': + objects.extend(obj.children) + + return collection diff --git a/openpype/hosts/blender/plugins/load/load_abc.py b/openpype/hosts/blender/plugins/load/load_abc.py new file mode 100644 index 0000000000..4248cffd69 --- /dev/null +++ b/openpype/hosts/blender/plugins/load/load_abc.py @@ -0,0 +1,246 @@ +"""Load an asset in Blender from an Alembic file.""" + +from pathlib import Path +from pprint import pformat +from typing import Dict, List, Optional + +from avalon import api, blender +import bpy +import openpype.hosts.blender.api.plugin as plugin + + +class CacheModelLoader(plugin.AssetLoader): + """Load cache models. + + Stores the imported asset in a collection named after the asset. + + Note: + At least for now it only supports Alembic files. 
+ """ + + families = ["model", "pointcache"] + representations = ["abc"] + + label = "Link Alembic" + icon = "code-fork" + color = "orange" + + def _remove(self, objects, container): + for obj in list(objects): + if obj.type == 'MESH': + bpy.data.meshes.remove(obj.data) + elif obj.type == 'EMPTY': + bpy.data.objects.remove(obj) + + bpy.data.collections.remove(container) + + def _process(self, libpath, container_name, parent_collection): + bpy.ops.object.select_all(action='DESELECT') + + view_layer = bpy.context.view_layer + view_layer_collection = view_layer.active_layer_collection.collection + + relative = bpy.context.preferences.filepaths.use_relative_paths + bpy.ops.wm.alembic_import( + filepath=libpath, + relative_path=relative + ) + + parent = parent_collection + + if parent is None: + parent = bpy.context.scene.collection + + model_container = bpy.data.collections.new(container_name) + parent.children.link(model_container) + for obj in bpy.context.selected_objects: + model_container.objects.link(obj) + view_layer_collection.objects.unlink(obj) + + name = obj.name + obj.name = f"{name}:{container_name}" + + # Groups are imported as Empty objects in Blender + if obj.type == 'MESH': + data_name = obj.data.name + obj.data.name = f"{data_name}:{container_name}" + + if not obj.get(blender.pipeline.AVALON_PROPERTY): + obj[blender.pipeline.AVALON_PROPERTY] = dict() + + avalon_info = obj[blender.pipeline.AVALON_PROPERTY] + avalon_info.update({"container_name": container_name}) + + bpy.ops.object.select_all(action='DESELECT') + + return model_container + + def process_asset( + self, context: dict, name: str, namespace: Optional[str] = None, + options: Optional[Dict] = None + ) -> Optional[List]: + """ + Arguments: + name: Use pre-defined name + namespace: Use pre-defined namespace + context: Full parenthood of representation to load + options: Additional settings dictionary + """ + + libpath = self.fname + asset = context["asset"]["name"] + subset = context["subset"]["name"] + + lib_container = plugin.asset_name( + asset, subset + ) + unique_number = plugin.get_unique_number( + asset, subset + ) + namespace = namespace or f"{asset}_{unique_number}" + container_name = plugin.asset_name( + asset, subset, unique_number + ) + + container = bpy.data.collections.new(lib_container) + container.name = container_name + blender.pipeline.containerise_existing( + container, + name, + namespace, + context, + self.__class__.__name__, + ) + + container_metadata = container.get( + blender.pipeline.AVALON_PROPERTY) + + container_metadata["libpath"] = libpath + container_metadata["lib_container"] = lib_container + + obj_container = self._process( + libpath, container_name, None) + + container_metadata["obj_container"] = obj_container + + # Save the list of objects in the metadata container + container_metadata["objects"] = obj_container.all_objects + + nodes = list(container.objects) + nodes.append(container) + self[:] = nodes + return nodes + + def update(self, container: Dict, representation: Dict): + """Update the loaded asset. + + This will remove all objects of the current collection, load the new + ones and add them to the collection. + If the objects of the collection are used in another collection they + will not be removed, only unlinked. Normally this should not be the + case though. + + Warning: + No nested collections are supported at the moment! 
+ """ + collection = bpy.data.collections.get( + container["objectName"] + ) + libpath = Path(api.get_representation_path(representation)) + extension = libpath.suffix.lower() + + self.log.info( + "Container: %s\nRepresentation: %s", + pformat(container, indent=2), + pformat(representation, indent=2), + ) + + assert collection, ( + f"The asset is not loaded: {container['objectName']}" + ) + assert not (collection.children), ( + "Nested collections are not supported." + ) + assert libpath, ( + "No existing library file found for {container['objectName']}" + ) + assert libpath.is_file(), ( + f"The file doesn't exist: {libpath}" + ) + assert extension in plugin.VALID_EXTENSIONS, ( + f"Unsupported file: {libpath}" + ) + + collection_metadata = collection.get( + blender.pipeline.AVALON_PROPERTY) + collection_libpath = collection_metadata["libpath"] + + obj_container = plugin.get_local_collection_with_name( + collection_metadata["obj_container"].name + ) + objects = obj_container.all_objects + + container_name = obj_container.name + + normalized_collection_libpath = ( + str(Path(bpy.path.abspath(collection_libpath)).resolve()) + ) + normalized_libpath = ( + str(Path(bpy.path.abspath(str(libpath))).resolve()) + ) + self.log.debug( + "normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s", + normalized_collection_libpath, + normalized_libpath, + ) + if normalized_collection_libpath == normalized_libpath: + self.log.info("Library already loaded, not updating...") + return + + parent = plugin.get_parent_collection(obj_container) + + self._remove(objects, obj_container) + + obj_container = self._process( + str(libpath), container_name, parent) + + collection_metadata["obj_container"] = obj_container + collection_metadata["objects"] = obj_container.all_objects + collection_metadata["libpath"] = str(libpath) + collection_metadata["representation"] = str(representation["_id"]) + + def remove(self, container: Dict) -> bool: + """Remove an existing container from a Blender scene. + + Arguments: + container (openpype:container-1.0): Container to remove, + from `host.ls()`. + + Returns: + bool: Whether the container was deleted. + + Warning: + No nested collections are supported at the moment! + """ + collection = bpy.data.collections.get( + container["objectName"] + ) + if not collection: + return False + assert not (collection.children), ( + "Nested collections are not supported." 
+ ) + + collection_metadata = collection.get( + blender.pipeline.AVALON_PROPERTY) + + obj_container = plugin.get_local_collection_with_name( + collection_metadata["obj_container"].name + ) + objects = obj_container.all_objects + + self._remove(objects, obj_container) + + bpy.data.collections.remove(collection) + + return True diff --git a/openpype/hosts/blender/plugins/load/load_layout.py b/openpype/hosts/blender/plugins/load/load_layout.py index 73b12d8c25..f1f2fdcddd 100644 --- a/openpype/hosts/blender/plugins/load/load_layout.py +++ b/openpype/hosts/blender/plugins/load/load_layout.py @@ -292,6 +292,9 @@ class UnrealLayoutLoader(plugin.AssetLoader): icon = "code-fork" color = "orange" + animation_creator_name = "CreateAnimation" + setdress_creator_name = "CreateSetDress" + def _remove_objects(self, objects): for obj in list(objects): if obj.type == 'ARMATURE': @@ -368,7 +371,7 @@ class UnrealLayoutLoader(plugin.AssetLoader): location.get('z') ) obj.rotation_euler = ( - rotation.get('x'), + rotation.get('x') + math.pi / 2, -rotation.get('y'), -rotation.get('z') ) diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py index 7297e459a6..d645bedfcc 100644 --- a/openpype/hosts/blender/plugins/load/load_model.py +++ b/openpype/hosts/blender/plugins/load/load_model.py @@ -242,65 +242,3 @@ class BlendModelLoader(plugin.AssetLoader): bpy.data.collections.remove(collection) return True - - -class CacheModelLoader(plugin.AssetLoader): - """Load cache models. - - Stores the imported asset in a collection named after the asset. - - Note: - At least for now it only supports Alembic files. - """ - - families = ["model"] - representations = ["abc"] - - label = "Link Model" - icon = "code-fork" - color = "orange" - - def process_asset( - self, context: dict, name: str, namespace: Optional[str] = None, - options: Optional[Dict] = None - ) -> Optional[List]: - """ - Arguments: - name: Use pre-defined name - namespace: Use pre-defined namespace - context: Full parenthood of representation to load - options: Additional settings dictionary - """ - raise NotImplementedError( - "Loading of Alembic files is not yet implemented.") - # TODO (jasper): implement Alembic import. - - libpath = self.fname - asset = context["asset"]["name"] - subset = context["subset"]["name"] - # TODO (jasper): evaluate use of namespace which is 'alien' to Blender. 
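The `load_layout` hunk above adds a 90-degree offset on the X axis when mapping stored rotations onto Blender objects, compensating for the source application's axis convention. A hedged sketch of that mapping, with the dict layout assumed from the surrounding code:

```python
import math

def to_blender_euler(rotation):
    """Map a stored rotation dict to a Blender XYZ euler tuple."""
    return (
        rotation.get('x') + math.pi / 2,  # +90 degrees on X
        -rotation.get('y'),
        -rotation.get('z'),
    )
```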
- lib_container = container_name = ( - plugin.asset_name(asset, subset, namespace) - ) - relative = bpy.context.preferences.filepaths.use_relative_paths - - with bpy.data.libraries.load( - libpath, link=True, relative=relative - ) as (data_from, data_to): - data_to.collections = [lib_container] - - scene = bpy.context.scene - instance_empty = bpy.data.objects.new( - container_name, None - ) - scene.collection.objects.link(instance_empty) - instance_empty.instance_type = 'COLLECTION' - collection = bpy.data.collections[lib_container] - collection.name = container_name - instance_empty.instance_collection = collection - - nodes = list(collection.objects) - nodes.append(collection) - nodes.append(instance_empty) - self[:] = nodes - return nodes diff --git a/openpype/hosts/blender/plugins/publish/extract_abc.py b/openpype/hosts/blender/plugins/publish/extract_abc.py index 6a89c6019b..a6315908fc 100644 --- a/openpype/hosts/blender/plugins/publish/extract_abc.py +++ b/openpype/hosts/blender/plugins/publish/extract_abc.py @@ -11,14 +11,14 @@ class ExtractABC(openpype.api.Extractor): label = "Extract ABC" hosts = ["blender"] - families = ["model"] + families = ["model", "pointcache"] optional = True def process(self, instance): # Define extract output file path stagingdir = self.staging_dir(instance) - filename = f"{instance.name}.fbx" + filename = f"{instance.name}.abc" filepath = os.path.join(stagingdir, filename) context = bpy.context @@ -52,6 +52,8 @@ class ExtractABC(openpype.api.Extractor): old_scale = scene.unit_settings.scale_length + bpy.ops.object.select_all(action='DESELECT') + selected = list() for obj in instance: @@ -67,14 +69,11 @@ class ExtractABC(openpype.api.Extractor): # We set the scale of the scene for the export scene.unit_settings.scale_length = 0.01 - self.log.info(new_context) - # We export the abc bpy.ops.wm.alembic_export( new_context, filepath=filepath, - start=1, - end=1 + selected=True ) view_layer.active_layer_collection = old_active_layer_collection diff --git a/openpype/hosts/blender/plugins/publish/validate_object_mode.py b/openpype/hosts/blender/plugins/publish/validate_object_mode.py new file mode 100644 index 0000000000..1c82628c1c --- /dev/null +++ b/openpype/hosts/blender/plugins/publish/validate_object_mode.py @@ -0,0 +1,35 @@ +from typing import List + +import pyblish.api +import openpype.hosts.blender.api.action + + +class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin): + """Validate that the current object is in Object Mode.""" + + order = pyblish.api.ValidatorOrder - 0.01 + hosts = ["blender"] + families = ["model", "rig"] + category = "geometry" + label = "Object is in Object Mode" + actions = [openpype.hosts.blender.api.action.SelectInvalidAction] + optional = True + + @classmethod + def get_invalid(cls, instance) -> List: + invalid = [] + for obj in [obj for obj in instance]: + try: + if obj.type == 'MESH' or obj.type == 'ARMATURE': + # Check if the object is in object mode. 
+ if not obj.mode == 'OBJECT': + invalid.append(obj) + except Exception: + continue + return invalid + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + f"Object found in instance is not in Object Mode: {invalid}") diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py index 0a70610acb..acd9da8229 100644 --- a/openpype/hosts/celaction/api/cli.py +++ b/openpype/hosts/celaction/api/cli.py @@ -57,27 +57,12 @@ def _prepare_publish_environments(): project_name = os.getenv("AVALON_PROJECT") asset_name = os.getenv("AVALON_ASSET") - io.install() - project_doc = io.find_one({ - "type": "project" - }) - av_asset = io.find_one({ - "type": "asset", - "name": asset_name - }) - parents = av_asset["data"]["parents"] - hierarchy = "" - if parents: - hierarchy = "/".join(parents) - env["AVALON_PROJECT"] = project_name env["AVALON_ASSET"] = asset_name env["AVALON_TASK"] = os.getenv("AVALON_TASK") env["AVALON_WORKDIR"] = os.getenv("AVALON_WORKDIR") - env["AVALON_HIERARCHY"] = hierarchy - env["AVALON_PROJECTCODE"] = project_doc["data"].get("code", "") env["AVALON_APP"] = f"hosts.{publish_host}" - env["AVALON_APP_NAME"] = "celaction_local" + env["AVALON_APP_NAME"] = "celaction/local" env["PYBLISH_HOSTS"] = publish_host diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index 4fec548993..0095530087 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -39,7 +39,7 @@ def install(): avalon.data["familiesStateDefault"] = False avalon.data["familiesStateToggled"] = family_states - log.info("pype.hosts.fusion installed") + log.info("openpype.hosts.fusion installed") pyblish.register_host("fusion") pyblish.register_plugin_path(PUBLISH_PATH) diff --git a/openpype/hosts/fusion/plugins/publish/collect_render_target.py b/openpype/hosts/fusion/plugins/publish/collect_render_target.py index 50cc4fd3e9..39017f32e0 100644 --- a/openpype/hosts/fusion/plugins/publish/collect_render_target.py +++ b/openpype/hosts/fusion/plugins/publish/collect_render_target.py @@ -13,7 +13,7 @@ class CollectFusionRenderMode(pyblish.api.InstancePlugin): available tool does not visualize which render mode is set for the current comp, please run the following line in the console (Py2) - comp.GetData("pype.rendermode") + comp.GetData("openpype.rendermode") This will return the name of the current render mode as seen above under Options. 
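Both Fusion plugins above read the render mode straight from composition data under the renamed `openpype.rendermode` key, defaulting to `"local"` when it was never set. A sketch of the read/write pair; `comp` is Fusion's scripting object and only exists inside a Fusion session:

```python
def get_rendermode(comp):
    # unset comp data returns None, so fall back to "local"
    return comp.GetData("openpype.rendermode") or "local"

def set_rendermode(comp, rendermode):
    comp.SetData("openpype.rendermode", rendermode)
```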
@@ -34,7 +34,7 @@ class CollectFusionRenderMode(pyblish.api.InstancePlugin): raise RuntimeError("No comp previously collected, unable to " "retrieve Fusion version.") - rendermode = comp.GetData("pype.rendermode") or "local" + rendermode = comp.GetData("openpype.rendermode") or "local" assert rendermode in options, "Must be supported render mode" self.log.info("Render mode: {0}".format(rendermode)) diff --git a/openpype/hosts/fusion/scripts/set_rendermode.py b/openpype/hosts/fusion/scripts/set_rendermode.py index cb0b9da513..cb104445a8 100644 --- a/openpype/hosts/fusion/scripts/set_rendermode.py +++ b/openpype/hosts/fusion/scripts/set_rendermode.py @@ -96,11 +96,11 @@ class SetRenderMode(QtWidgets.QWidget): return self._comp.GetAttrs("COMPS_Name") def _get_comp_rendermode(self): - return self._comp.GetData("pype.rendermode") or "local" + return self._comp.GetData("openpype.rendermode") or "local" def _set_comp_rendermode(self): rendermode = self.mode_options.currentText() - self._comp.SetData("pype.rendermode", rendermode) + self._comp.SetData("openpype.rendermode", rendermode) self._comp.Print("Updated render mode to '%s'\n" % rendermode) self.hide() diff --git a/openpype/hosts/hiero/api/__init__.py b/openpype/hosts/hiero/api/__init__.py index fcb1d50ea8..8d0105ae5f 100644 --- a/openpype/hosts/hiero/api/__init__.py +++ b/openpype/hosts/hiero/api/__init__.py @@ -22,6 +22,7 @@ from .pipeline import ( ) from .lib import ( + pype_tag_name, get_track_items, get_current_project, get_current_sequence, @@ -73,6 +74,7 @@ __all__ = [ "work_root", # Lib functions + "pype_tag_name", "get_track_items", "get_current_project", "get_current_sequence", diff --git a/openpype/hosts/hiero/api/events.py b/openpype/hosts/hiero/api/events.py index c02e3e2ac4..3df095f9e4 100644 --- a/openpype/hosts/hiero/api/events.py +++ b/openpype/hosts/hiero/api/events.py @@ -2,7 +2,12 @@ import os import hiero.core.events import avalon.api as avalon from openpype.api import Logger -from .lib import sync_avalon_data_to_workfile, launch_workfiles_app +from .lib import ( + sync_avalon_data_to_workfile, + launch_workfiles_app, + selection_changed_timeline, + before_project_save +) from .tags import add_tags_to_workfile from .menu import update_menu_task_label @@ -78,7 +83,7 @@ def register_hiero_events(): "Registering events for: kBeforeNewProjectCreated, " "kAfterNewProjectCreated, kBeforeProjectLoad, kAfterProjectLoad, " "kBeforeProjectSave, kAfterProjectSave, kBeforeProjectClose, " - "kAfterProjectClose, kShutdown, kStartup" + "kAfterProjectClose, kShutdown, kStartup, kSelectionChanged" ) # hiero.core.events.registerInterest( @@ -91,8 +96,8 @@ def register_hiero_events(): hiero.core.events.registerInterest( "kAfterProjectLoad", afterProjectLoad) - # hiero.core.events.registerInterest( - # "kBeforeProjectSave", beforeProjectSaved) + hiero.core.events.registerInterest( + "kBeforeProjectSave", before_project_save) # hiero.core.events.registerInterest( # "kAfterProjectSave", afterProjectSaved) # @@ -104,10 +109,16 @@ def register_hiero_events(): # hiero.core.events.registerInterest("kShutdown", shutDown) # hiero.core.events.registerInterest("kStartup", startupCompleted) - # workfiles - hiero.core.events.registerEventType("kStartWorkfiles") - hiero.core.events.registerInterest("kStartWorkfiles", launch_workfiles_app) + hiero.core.events.registerInterest( + ("kSelectionChanged", "kTimeline"), selection_changed_timeline) + # workfiles + try: + hiero.core.events.registerEventType("kStartWorkfiles") + 
hiero.core.events.registerInterest(
+            "kStartWorkfiles", launch_workfiles_app)
+    except RuntimeError:
+        pass


 def register_events():
     """
diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py
index b74e70cae3..a9982d96c4 100644
--- a/openpype/hosts/hiero/api/lib.py
+++ b/openpype/hosts/hiero/api/lib.py
@@ -9,7 +9,7 @@ import hiero
 import avalon.api as avalon
 import avalon.io
 from avalon.vendor.Qt import QtWidgets
-from openpype.api import (Logger, Anatomy, config)
+from openpype.api import (Logger, Anatomy, get_anatomy_settings)
 from . import tags
 import shutil
 from compiler.ast import flatten
@@ -30,9 +30,9 @@ self = sys.modules[__name__]
 self._has_been_setup = False
 self._has_menu = False
 self._registered_gui = None
-self.pype_tag_name = "Pype Data"
-self.default_sequence_name = "PypeSequence"
-self.default_bin_name = "PypeBin"
+self.pype_tag_name = "openpypeData"
+self.default_sequence_name = "openpypeSequence"
+self.default_bin_name = "openpypeBin"

 AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype")

@@ -150,15 +150,27 @@ def get_track_items(
     # get selected track items or all in active sequence
     if selected:
-        selected_items = list(hiero.selection)
-        for item in selected_items:
-            if track_name and track_name in item.parent().name():
-                # filter only items fitting input track name
-                track_items.append(item)
-            elif not track_name:
-                # or add all if no track_name was defined
-                track_items.append(item)
-    else:
+        try:
+            selected_items = list(hiero.selection)
+            for item in selected_items:
+                if track_name and track_name in item.parent().name():
+                    # filter only items fitting input track name
+                    track_items.append(item)
+                elif not track_name:
+                    # or add all if no track_name was defined
+                    track_items.append(item)
+        except AttributeError:
+            pass
+
+    # check if any collected track items are
+    # `core.Hiero.Python.TrackItem` instance
+    if track_items:
+        any_track_item = track_items[0]
+        if not isinstance(any_track_item, hiero.core.TrackItem):
+            track_items = []
+
+    # collect all available active sequence track items
+    if not track_items:
         sequence = get_current_sequence(name=sequence_name)
         # get all available tracks from sequence
         tracks = list(sequence.audioTracks()) + list(sequence.videoTracks())
@@ -240,7 +252,7 @@ def set_track_item_pype_tag(track_item, data=None):
     # basic Tag's attribute
     tag_data = {
         "editable": "0",
-        "note": "Pype data holder",
+        "note": "OpenPype data container",
         "icon": "openpype_icon.png",
         "metadata": {k: v for k, v in data.items()}
     }
@@ -744,10 +756,13 @@ def _set_hrox_project_knobs(doc, **knobs):
     # set attributes to Project Tag
     proj_elem = doc.documentElement().firstChildElement("Project")
     for k, v in knobs.items():
-        proj_elem.setAttribute(k, v)
+        if isinstance(v, dict):
+            continue
+        proj_elem.setAttribute(str(k), v)


 def apply_colorspace_project():
+    project_name = os.getenv("AVALON_PROJECT")
     # get path to the active project
     project = get_current_project(remove_untitled=True)
     current_file = project.path()
@@ -756,9 +771,9 @@ def apply_colorspace_project():
     project.close()

     # get presets for hiero
-    presets = config.get_init_presets()
-    colorspace = presets["colorspace"]
-    hiero_project_clrs = colorspace.get("hiero", {}).get("project", {})
+    imageio = get_anatomy_settings(
+        project_name)["imageio"].get("hiero", None)
+    presets = imageio.get("workfile")

     # save the workfile as subversion "comment:_colorspaceChange"
     split_current_file = os.path.splitext(current_file)
@@ -789,13 +804,13 @@ def apply_colorspace_project():
     os.remove(copy_current_file_tmp)
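The `_set_hrox_project_knobs` change above stamps the colorspace presets as attributes on the `<Project>` element of the saved `.hrox` (an XML document), skipping dict values that cannot be flattened into an attribute. A sketch of the same idea using `xml.etree` instead of the Qt DOM classes, assuming the same `<Project>` child layout:

```python
import xml.etree.ElementTree as ET

def set_project_knobs(hrox_path, out_path, **knobs):
    tree = ET.parse(hrox_path)
    project = tree.getroot().find("Project")
    for key, value in knobs.items():
        if isinstance(value, dict):
            # nested settings cannot be stored as a flat XML attribute
            continue
        project.set(str(key), str(value))
    tree.write(out_path)
```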
# use the code from bellow for changing xml hrox Attributes - hiero_project_clrs.update({"name": os.path.basename(copy_current_file)}) + presets.update({"name": os.path.basename(copy_current_file)}) # read HROX in as QDomSocument doc = _read_doc_from_path(copy_current_file) # apply project colorspace properties - _set_hrox_project_knobs(doc, **hiero_project_clrs) + _set_hrox_project_knobs(doc, **presets) # write QDomSocument back as HROX _write_doc_to_path(doc, copy_current_file) @@ -805,14 +820,17 @@ def apply_colorspace_project(): def apply_colorspace_clips(): + project_name = os.getenv("AVALON_PROJECT") project = get_current_project(remove_untitled=True) clips = project.clips() # get presets for hiero - presets = config.get_init_presets() - colorspace = presets["colorspace"] - hiero_clips_clrs = colorspace.get("hiero", {}).get("clips", {}) + imageio = get_anatomy_settings( + project_name)["imageio"].get("hiero", None) + from pprint import pprint + presets = imageio.get("regexInputs", {}).get("inputs", {}) + pprint(presets) for clip in clips: clip_media_source_path = clip.mediaSource().firstpath() clip_name = clip.name() @@ -822,10 +840,11 @@ def apply_colorspace_clips(): continue # check if any colorspace presets for read is mathing - preset_clrsp = next((hiero_clips_clrs[k] - for k in hiero_clips_clrs - if bool(re.search(k, clip_media_source_path))), - None) + preset_clrsp = None + for k in presets: + if not bool(re.search(k["regex"], clip_media_source_path)): + continue + preset_clrsp = k["colorspace"] if preset_clrsp: log.debug("Changing clip.path: {}".format(clip_media_source_path)) @@ -893,3 +912,61 @@ def get_sequence_pattern_and_padding(file): return found, padding else: return None, None + + +def sync_clip_name_to_data_asset(track_items_list): + # loop trough all selected clips + for track_item in track_items_list: + # ignore if parent track is locked or disabled + if track_item.parent().isLocked(): + continue + if not track_item.parent().isEnabled(): + continue + # ignore if the track item is disabled + if not track_item.isEnabled(): + continue + + # get name and data + ti_name = track_item.name() + data = get_track_item_pype_data(track_item) + + # ignore if no data on the clip or not publish instance + if not data: + continue + if data.get("id") != "pyblish.avalon.instance": + continue + + # fix data if wrong name + if data["asset"] != ti_name: + data["asset"] = ti_name + # remove the original tag + tag = get_track_item_pype_tag(track_item) + track_item.removeTag(tag) + # create new tag with updated data + set_track_item_pype_tag(track_item, data) + print("asset was changed in clip: {}".format(ti_name)) + + +def selection_changed_timeline(event): + """Callback on timeline to check if asset in data is the same as clip name. 
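The clip pass above matches each clip's media path against the `regexInputs` presets; note that the loop does not break, so the last matching preset wins. A compact, standalone equivalent, with the preset shape assumed from the code above and sample data for illustration:

```python
import re

def match_colorspace(path, presets):
    colorspace = None
    for preset in presets:
        if re.search(preset["regex"], path):
            colorspace = preset["colorspace"]  # last match wins, as above
    return colorspace

presets = [{"regex": r"\.exr$", "colorspace": "ACES - ACEScg"}]
print(match_colorspace("/plates/sh010_plate.0001.exr", presets))
```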
+ + Args: + event (hiero.core.Event): timeline event + """ + timeline_editor = event.sender + selection = timeline_editor.selection() + + # run checking function + sync_clip_name_to_data_asset(selection) + + +def before_project_save(event): + track_items = get_track_items( + selected=False, + track_type="video", + check_enabled=True, + check_locked=True, + check_tagged=True) + + # run checking function + sync_clip_name_to_data_asset(track_items) diff --git a/openpype/hosts/hiero/api/menu.py b/openpype/hosts/hiero/api/menu.py index 9ccf5e39d1..ab49251093 100644 --- a/openpype/hosts/hiero/api/menu.py +++ b/openpype/hosts/hiero/api/menu.py @@ -68,50 +68,45 @@ def menu_install(): menu.addSeparator() - workfiles_action = menu.addAction("Work Files...") + workfiles_action = menu.addAction("Work Files ...") workfiles_action.setIcon(QtGui.QIcon("icons:Position.png")) workfiles_action.triggered.connect(launch_workfiles_app) - default_tags_action = menu.addAction("Create Default Tags...") + default_tags_action = menu.addAction("Create Default Tags") default_tags_action.setIcon(QtGui.QIcon("icons:Position.png")) default_tags_action.triggered.connect(tags.add_tags_to_workfile) menu.addSeparator() - publish_action = menu.addAction("Publish...") + publish_action = menu.addAction("Publish ...") publish_action.setIcon(QtGui.QIcon("icons:Output.png")) publish_action.triggered.connect( lambda *args: publish(hiero.ui.mainWindow()) ) - creator_action = menu.addAction("Create...") + creator_action = menu.addAction("Create ...") creator_action.setIcon(QtGui.QIcon("icons:CopyRectangle.png")) creator_action.triggered.connect(creator.show) - loader_action = menu.addAction("Load...") + loader_action = menu.addAction("Load ...") loader_action.setIcon(QtGui.QIcon("icons:CopyRectangle.png")) loader_action.triggered.connect(cbloader.show) - sceneinventory_action = menu.addAction("Manage...") + sceneinventory_action = menu.addAction("Manage ...") sceneinventory_action.setIcon(QtGui.QIcon("icons:CopyRectangle.png")) sceneinventory_action.triggered.connect(sceneinventory.show) menu.addSeparator() - reload_action = menu.addAction("Reload pipeline...") - reload_action.setIcon(QtGui.QIcon("icons:ColorAdd.png")) - reload_action.triggered.connect(reload_config) + if os.getenv("OPENPYPE_DEVELOP"): + reload_action = menu.addAction("Reload pipeline") + reload_action.setIcon(QtGui.QIcon("icons:ColorAdd.png")) + reload_action.triggered.connect(reload_config) menu.addSeparator() - apply_colorspace_p_action = menu.addAction("Apply Colorspace Project...") + apply_colorspace_p_action = menu.addAction("Apply Colorspace Project") apply_colorspace_p_action.setIcon(QtGui.QIcon("icons:ColorAdd.png")) apply_colorspace_p_action.triggered.connect(apply_colorspace_project) - apply_colorspace_c_action = menu.addAction("Apply Colorspace Clips...") + apply_colorspace_c_action = menu.addAction("Apply Colorspace Clips") apply_colorspace_c_action.setIcon(QtGui.QIcon("icons:ColorAdd.png")) apply_colorspace_c_action.triggered.connect(apply_colorspace_clips) - - self.context_label_action = context_label_action - self.workfile_actions = workfiles_action - self.default_tags_action = default_tags_action - self.publish_action = publish_action - self.reload_action = reload_action diff --git a/openpype/hosts/hiero/api/plugin.py b/openpype/hosts/hiero/api/plugin.py index 92e15cfae4..c46ef9abfa 100644 --- a/openpype/hosts/hiero/api/plugin.py +++ b/openpype/hosts/hiero/api/plugin.py @@ -4,10 +4,10 @@ import hiero from Qt import QtWidgets, QtCore from 
avalon.vendor import qargparse import avalon.api as avalon -import openpype.api as pype +import openpype.api as openpype from . import lib -log = pype.Logger().get_logger(__name__) +log = openpype.Logger().get_logger(__name__) def load_stylesheet(): @@ -266,7 +266,8 @@ class CreatorWidget(QtWidgets.QDialog): elif v["type"] == "QSpinBox": data[k]["value"] = self.create_row( content_layout, "QSpinBox", v["label"], - setValue=v["value"], setMaximum=10000, setToolTip=tool_tip) + setValue=v["value"], setMinimum=0, + setMaximum=100000, setToolTip=tool_tip) return data @@ -387,7 +388,8 @@ class ClipLoader: # try to get value from options or evaluate key value for `load_to` self.new_sequence = options.get("newSequence") or bool( "New timeline" in options.get("load_to", "")) - + self.clip_name_template = options.get( + "clipNameTemplate") or "{asset}_{subset}_{representation}" assert self._populate_data(), str( "Cannot Load selected data, look into database " "or call your supervisor") @@ -432,7 +434,7 @@ class ClipLoader: asset = str(repr_cntx["asset"]) subset = str(repr_cntx["subset"]) representation = str(repr_cntx["representation"]) - self.data["clip_name"] = "_".join([asset, subset, representation]) + self.data["clip_name"] = self.clip_name_template.format(**repr_cntx) self.data["track_name"] = "_".join([subset, representation]) self.data["versionData"] = self.context["version"]["data"] # gets file path @@ -476,7 +478,7 @@ class ClipLoader: """ asset_name = self.context["representation"]["context"]["asset"] - self.data["assetData"] = pype.get_asset(asset_name)["data"] + self.data["assetData"] = openpype.get_asset(asset_name)["data"] def _make_track_item(self, source_bin_item, audio=False): """ Create track item with """ @@ -543,15 +545,9 @@ class ClipLoader: if "slate" in f), # if nothing was found then use default None # so other bool could be used - None) or bool((( - # put together duration of clip attributes - self.timeline_out - self.timeline_in + 1) \ - + self.handle_start \ - + self.handle_end - # and compare it with meda duration - ) > self.media_duration) - - print("__ slate_on: `{}`".format(slate_on)) + None) or bool(int( + (self.timeline_out - self.timeline_in + 1) + + self.handle_start + self.handle_end) < self.media_duration) # if slate is on then remove the slate frame from begining if slate_on: @@ -592,7 +588,7 @@ class ClipLoader: return track_item -class Creator(pype.Creator): +class Creator(openpype.Creator): """Creator class wrapper """ clip_color = "Purple" @@ -601,7 +597,7 @@ class Creator(pype.Creator): def __init__(self, *args, **kwargs): import openpype.hosts.hiero.api as phiero super(Creator, self).__init__(*args, **kwargs) - self.presets = pype.get_current_project_settings()[ + self.presets = openpype.get_current_project_settings()[ "hiero"]["create"].get(self.__class__.__name__, {}) # adding basic current context resolve objects @@ -674,6 +670,9 @@ class PublishClip: if kwargs.get("avalon"): self.tag_data.update(kwargs["avalon"]) + # add publish attribute to tag data + self.tag_data.update({"publish": True}) + # adding ui inputs if any self.ui_inputs = kwargs.get("ui_inputs", {}) @@ -687,6 +686,7 @@ class PublishClip: self._create_parents() def convert(self): + # solve track item data and add them to tag data self._convert_to_tag_data() @@ -705,6 +705,12 @@ class PublishClip: self.tag_data["asset"] = new_name else: self.tag_data["asset"] = self.ti_name + self.tag_data["hierarchyData"]["shot"] = self.ti_name + + if self.tag_data["heroTrack"] and self.review_layer: + 
self.tag_data.update({"reviewTrack": self.review_layer}) + else: + self.tag_data.update({"reviewTrack": None}) # create pype tag on track_item and add data lib.imprint(self.track_item, self.tag_data) @@ -773,8 +779,8 @@ class PublishClip: _spl = text.split("#") _len = (len(_spl) - 1) _repl = "{{{0}:0>{1}}}".format(name, _len) - new_text = text.replace(("#" * _len), _repl) - return new_text + return text.replace(("#" * _len), _repl) + def _convert_to_tag_data(self): """ Convert internal data to tag data. @@ -782,13 +788,13 @@ class PublishClip: Populating the tag data into internal variable self.tag_data """ # define vertical sync attributes - master_layer = True + hero_track = True self.review_layer = "" if self.vertical_sync: # check if track name is not in driving layer if self.track_name not in self.driving_layer: # if it is not then define vertical sync as None - master_layer = False + hero_track = False # increasing steps by index of rename iteration self.count_steps *= self.rename_index @@ -802,7 +808,7 @@ class PublishClip: self.tag_data[_k] = _v["value"] # driving layer is set as positive match - if master_layer or self.vertical_sync: + if hero_track or self.vertical_sync: # mark review layer if self.review_track and ( self.review_track not in self.review_track_default): @@ -836,40 +842,40 @@ class PublishClip: hierarchy_formating_data ) - tag_hierarchy_data.update({"masterLayer": True}) - if master_layer and self.vertical_sync: + tag_hierarchy_data.update({"heroTrack": True}) + if hero_track and self.vertical_sync: self.vertical_clip_match.update({ (self.clip_in, self.clip_out): tag_hierarchy_data }) - if not master_layer and self.vertical_sync: + if not hero_track and self.vertical_sync: # driving layer is set as negative match - for (_in, _out), master_data in self.vertical_clip_match.items(): - master_data.update({"masterLayer": False}) + for (_in, _out), hero_data in self.vertical_clip_match.items(): + hero_data.update({"heroTrack": False}) if _in == self.clip_in and _out == self.clip_out: - data_subset = master_data["subset"] - # add track index in case duplicity of names in master data + data_subset = hero_data["subset"] + # add track index in case duplicity of names in hero data if self.subset in data_subset: - master_data["subset"] = self.subset + str( + hero_data["subset"] = self.subset + str( self.track_index) # in case track name and subset name is the same then add if self.subset_name == self.track_name: - master_data["subset"] = self.subset + hero_data["subset"] = self.subset # assing data to return hierarchy data to tag - tag_hierarchy_data = master_data + tag_hierarchy_data = hero_data # add data to return data dict self.tag_data.update(tag_hierarchy_data) - if master_layer and self.review_layer: - self.tag_data.update({"reviewTrack": self.review_layer}) - def _solve_tag_hierarchy_data(self, hierarchy_formating_data): """ Solve tag data from hierarchy data and templates. 
""" # fill up clip name and hierarchy keys hierarchy_filled = self.hierarchy.format(**hierarchy_formating_data) clip_name_filled = self.clip_name.format(**hierarchy_formating_data) + # remove shot from hierarchy data: is not needed anymore + hierarchy_formating_data.pop("shot") + return { "newClipName": clip_name_filled, "hierarchy": hierarchy_filled, diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py index 06fa655a2e..d2502f3c71 100644 --- a/openpype/hosts/hiero/api/tags.py +++ b/openpype/hosts/hiero/api/tags.py @@ -84,6 +84,13 @@ def update_tag(tag, data): mtd = tag.metadata() # get metadata key from data data_mtd = data.get("metadata", {}) + + # due to hiero bug we have to make sure keys which are not existent in + # data are cleared of value by `None` + for _mk in mtd.keys(): + if _mk.replace("tag.", "") not in data_mtd.keys(): + mtd.setValue(_mk, str(None)) + # set all data metadata to tag metadata for k, v in data_mtd.items(): mtd.setValue( diff --git a/openpype/hosts/hiero/otio/__init__.py b/openpype/hosts/hiero/otio/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/hosts/hiero/otio/hiero_export.py b/openpype/hosts/hiero/otio/hiero_export.py new file mode 100644 index 0000000000..6e751d3aa4 --- /dev/null +++ b/openpype/hosts/hiero/otio/hiero_export.py @@ -0,0 +1,366 @@ +""" compatibility OpenTimelineIO 0.12.0 and newer +""" + +import os +import re +import sys +import ast +from compiler.ast import flatten +import opentimelineio as otio +from . import utils +import hiero.core +import hiero.ui + +self = sys.modules[__name__] +self.track_types = { + hiero.core.VideoTrack: otio.schema.TrackKind.Video, + hiero.core.AudioTrack: otio.schema.TrackKind.Audio +} +self.project_fps = None +self.marker_color_map = { + "magenta": otio.schema.MarkerColor.MAGENTA, + "red": otio.schema.MarkerColor.RED, + "yellow": otio.schema.MarkerColor.YELLOW, + "green": otio.schema.MarkerColor.GREEN, + "cyan": otio.schema.MarkerColor.CYAN, + "blue": otio.schema.MarkerColor.BLUE, +} +self.timeline = None +self.include_tags = True + + +def get_current_hiero_project(remove_untitled=False): + projects = flatten(hiero.core.projects()) + if not remove_untitled: + return next(iter(projects)) + + # if remove_untitled + for proj in projects: + if "Untitled" in proj.name(): + proj.close() + else: + return proj + + +def create_otio_rational_time(frame, fps): + return otio.opentime.RationalTime( + float(frame), + float(fps) + ) + + +def create_otio_time_range(start_frame, frame_duration, fps): + return otio.opentime.TimeRange( + start_time=create_otio_rational_time(start_frame, fps), + duration=create_otio_rational_time(frame_duration, fps) + ) + + +def _get_metadata(item): + if hasattr(item, 'metadata'): + return {key: value for key, value in dict(item.metadata()).items()} + return {} + + +def create_otio_reference(clip): + metadata = _get_metadata(clip) + media_source = clip.mediaSource() + + # get file info for path and start frame + file_info = media_source.fileinfos().pop() + frame_start = file_info.startFrame() + path = file_info.filename() + + # get padding and other file infos + padding = media_source.filenamePadding() + file_head = media_source.filenameHead() + is_sequence = not media_source.singleFile() + frame_duration = media_source.duration() + fps = utils.get_rate(clip) or self.project_fps + extension = os.path.splitext(path)[-1] + + if is_sequence: + metadata.update({ + "isSequence": True, + "padding": padding + }) + + # add resolution metadata + 
metadata.update({ + "openpype.source.colourtransform": clip.sourceMediaColourTransform(), + "openpype.source.width": int(media_source.width()), + "openpype.source.height": int(media_source.height()), + "openpype.source.pixelAspect": float(media_source.pixelAspect()) + }) + + otio_ex_ref_item = None + + if is_sequence: + # if it is file sequence try to create `ImageSequenceReference` + # the OTIO might not be compatible so return nothing and do it old way + try: + dirname = os.path.dirname(path) + otio_ex_ref_item = otio.schema.ImageSequenceReference( + target_url_base=dirname + os.sep, + name_prefix=file_head, + name_suffix=extension, + start_frame=frame_start, + frame_zero_padding=padding, + rate=fps, + available_range=create_otio_time_range( + frame_start, + frame_duration, + fps + ) + ) + except AttributeError: + pass + + if not otio_ex_ref_item: + reformat_path = utils.get_reformated_path(path, padded=False) + # in case old OTIO or video file create `ExternalReference` + otio_ex_ref_item = otio.schema.ExternalReference( + target_url=reformat_path, + available_range=create_otio_time_range( + frame_start, + frame_duration, + fps + ) + ) + + # add metadata to otio item + add_otio_metadata(otio_ex_ref_item, media_source, **metadata) + + return otio_ex_ref_item + + +def get_marker_color(tag): + icon = tag.icon() + pat = r'icons:Tag(?P\w+)\.\w+' + + res = re.search(pat, icon) + if res: + color = res.groupdict().get('color') + if color.lower() in self.marker_color_map: + return self.marker_color_map[color.lower()] + + return otio.schema.MarkerColor.RED + + +def create_otio_markers(otio_item, item): + for tag in item.tags(): + if not tag.visible(): + continue + + if tag.name() == 'Copy': + # Hiero adds this tag to a lot of clips + continue + + frame_rate = utils.get_rate(item) or self.project_fps + + marked_range = otio.opentime.TimeRange( + start_time=otio.opentime.RationalTime( + tag.inTime(), + frame_rate + ), + duration=otio.opentime.RationalTime( + int(tag.metadata().dict().get('tag.length', '0')), + frame_rate + ) + ) + # add tag metadata but remove "tag." 
string + metadata = {} + + for key, value in tag.metadata().dict().items(): + _key = key.replace("tag.", "") + + try: + # capture exceptions which are related to strings only + _value = ast.literal_eval(value) + except (ValueError, SyntaxError): + _value = value + + metadata.update({_key: _value}) + + # Store the source item for future import assignment + metadata['hiero_source_type'] = item.__class__.__name__ + + marker = otio.schema.Marker( + name=tag.name(), + color=get_marker_color(tag), + marked_range=marked_range, + metadata=metadata + ) + + otio_item.markers.append(marker) + + +def create_otio_clip(track_item): + clip = track_item.source() + source_in = track_item.sourceIn() + duration = track_item.sourceDuration() + fps = utils.get_rate(track_item) or self.project_fps + name = track_item.name() + + media_reference = create_otio_reference(clip) + source_range = create_otio_time_range( + int(source_in), + int(duration), + fps + ) + + otio_clip = otio.schema.Clip( + name=name, + source_range=source_range, + media_reference=media_reference + ) + + # Add tags as markers + if self.include_tags: + create_otio_markers(otio_clip, track_item) + create_otio_markers(otio_clip, track_item.source()) + + return otio_clip + + +def create_otio_gap(gap_start, clip_start, tl_start_frame, fps): + return otio.schema.Gap( + source_range=create_otio_time_range( + gap_start, + (clip_start - tl_start_frame) - gap_start, + fps + ) + ) + + +def _create_otio_timeline(): + project = get_current_hiero_project(remove_untitled=False) + metadata = _get_metadata(self.timeline) + + metadata.update({ + "openpype.timeline.width": int(self.timeline.format().width()), + "openpype.timeline.height": int(self.timeline.format().height()), + "openpype.timeline.pixelAspect": int(self.timeline.format().pixelAspect()), # noqa + "openpype.project.useOCIOEnvironmentOverride": project.useOCIOEnvironmentOverride(), # noqa + "openpype.project.lutSetting16Bit": project.lutSetting16Bit(), + "openpype.project.lutSetting8Bit": project.lutSetting8Bit(), + "openpype.project.lutSettingFloat": project.lutSettingFloat(), + "openpype.project.lutSettingLog": project.lutSettingLog(), + "openpype.project.lutSettingViewer": project.lutSettingViewer(), + "openpype.project.lutSettingWorkingSpace": project.lutSettingWorkingSpace(), # noqa + "openpype.project.lutUseOCIOForExport": project.lutUseOCIOForExport(), + "openpype.project.ocioConfigName": project.ocioConfigName(), + "openpype.project.ocioConfigPath": project.ocioConfigPath() + }) + + start_time = create_otio_rational_time( + self.timeline.timecodeStart(), self.project_fps) + + return otio.schema.Timeline( + name=self.timeline.name(), + global_start_time=start_time, + metadata=metadata + ) + + +def create_otio_track(track_type, track_name): + return otio.schema.Track( + name=track_name, + kind=self.track_types[track_type] + ) + + +def add_otio_gap(track_item, otio_track, prev_out): + gap_length = track_item.timelineIn() - prev_out + if prev_out != 0: + gap_length -= 1 + + gap = otio.opentime.TimeRange( + duration=otio.opentime.RationalTime( + gap_length, + self.project_fps + ) + ) + otio_gap = otio.schema.Gap(source_range=gap) + otio_track.append(otio_gap) + + +def add_otio_metadata(otio_item, media_source, **kwargs): + metadata = _get_metadata(media_source) + + # add additional metadata from kwargs + if kwargs: + metadata.update(kwargs) + + # add metadata to otio item metadata + for key, value in metadata.items(): + otio_item.metadata.update({key: value}) + + +def create_otio_timeline(): + + 
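Tag metadata in Hiero is stored as strings, so the marker export above recovers typed values with `ast.literal_eval`, falling back to the raw string for anything that does not parse. A standalone illustration:

```python
import ast

def parse_tag_value(value):
    try:
        return ast.literal_eval(value)
    except (ValueError, SyntaxError):
        return value

assert parse_tag_value("42") == 42
assert parse_tag_value("[1, 2]") == [1, 2]
assert parse_tag_value("plateMain") == "plateMain"
```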
# get current timeline + self.timeline = hiero.ui.activeSequence() + self.project_fps = self.timeline.framerate().toFloat() + + # convert timeline to otio + otio_timeline = _create_otio_timeline() + + # loop all defined track types + for track in self.timeline.items(): + # skip if track is disabled + if not track.isEnabled(): + continue + + # convert track to otio + otio_track = create_otio_track( + type(track), track.name()) + + for itemindex, track_item in enumerate(track): + # skip offline track items + if not track_item.isMediaPresent(): + continue + + # skip if track item is disabled + if not track_item.isEnabled(): + continue + + # Add Gap if needed + if itemindex == 0: + # if it is first track item at track then add + # it to previouse item + prev_item = track_item + + else: + # get previouse item + prev_item = track_item.parent().items()[itemindex - 1] + + # calculate clip frame range difference from each other + clip_diff = track_item.timelineIn() - prev_item.timelineOut() + + # add gap if first track item is not starting + # at first timeline frame + if itemindex == 0 and track_item.timelineIn() > 0: + add_otio_gap(track_item, otio_track, 0) + + # or add gap if following track items are having + # frame range differences from each other + elif itemindex and clip_diff != 1: + add_otio_gap(track_item, otio_track, prev_item.timelineOut()) + + # create otio clip and add it to track + otio_clip = create_otio_clip(track_item) + otio_track.append(otio_clip) + + # Add tags as markers + if self.include_tags: + create_otio_markers(otio_track, track) + + # add track to otio timeline + otio_timeline.tracks.append(otio_track) + + return otio_timeline + + +def write_to_file(otio_timeline, path): + otio.adapters.write_to_file(otio_timeline, path) diff --git a/openpype/hosts/hiero/otio/hiero_import.py b/openpype/hosts/hiero/otio/hiero_import.py new file mode 100644 index 0000000000..257c434011 --- /dev/null +++ b/openpype/hosts/hiero/otio/hiero_import.py @@ -0,0 +1,545 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +__author__ = "Daniel Flehner Heen" +__credits__ = ["Jakub Jezek", "Daniel Flehner Heen"] + + +import os +import hiero.core +import hiero.ui + +import PySide2.QtWidgets as qw + +try: + from urllib import unquote + +except ImportError: + from urllib.parse import unquote # lint:ok + +import opentimelineio as otio + +_otio_old = False + + +def inform(messages): + if isinstance(messages, type('')): + messages = [messages] + + qw.QMessageBox.information( + hiero.ui.mainWindow(), + 'OTIO Import', + '\n'.join(messages), + qw.QMessageBox.StandardButton.Ok + ) + + +def get_transition_type(otio_item, otio_track): + _in, _out = otio_track.neighbors_of(otio_item) + + if isinstance(_in, otio.schema.Gap): + _in = None + + if isinstance(_out, otio.schema.Gap): + _out = None + + if _in and _out: + return 'dissolve' + + elif _in and not _out: + return 'fade_out' + + elif not _in and _out: + return 'fade_in' + + else: + return 'unknown' + + +def find_trackitem(otio_clip, hiero_track): + for item in hiero_track.items(): + if item.timelineIn() == otio_clip.range_in_parent().start_time.value: + if item.name() == otio_clip.name: + return item + + return None + + +def get_neighboring_trackitems(otio_item, otio_track, hiero_track): + _in, _out = otio_track.neighbors_of(otio_item) + trackitem_in = None + trackitem_out = None + + if _in: + trackitem_in = find_trackitem(_in, hiero_track) + + if _out: + trackitem_out = find_trackitem(_out, hiero_track) + + return trackitem_in, trackitem_out + + +def 
apply_transition(otio_track, otio_item, track): + warning = None + + # Figure out type of transition + transition_type = get_transition_type(otio_item, otio_track) + + # Figure out track kind for getattr below + kind = '' + if isinstance(track, hiero.core.AudioTrack): + kind = 'Audio' + + # Gather TrackItems involved in trasition + item_in, item_out = get_neighboring_trackitems( + otio_item, + otio_track, + track + ) + + # Create transition object + if transition_type == 'dissolve': + transition_func = getattr( + hiero.core.Transition, + 'create{kind}DissolveTransition'.format(kind=kind) + ) + + try: + transition = transition_func( + item_in, + item_out, + otio_item.in_offset.value, + otio_item.out_offset.value + ) + + # Catch error raised if transition is bigger than TrackItem source + except RuntimeError as e: + transition = None + warning = ( + "Unable to apply transition \"{t.name}\": {e} " + "Ignoring the transition.").format(t=otio_item, e=str(e)) + + elif transition_type == 'fade_in': + transition_func = getattr( + hiero.core.Transition, + 'create{kind}FadeInTransition'.format(kind=kind) + ) + + # Warn user if part of fade is outside of clip + if otio_item.in_offset.value: + warning = \ + 'Fist half of transition "{t.name}" is outside of clip and ' \ + 'not valid in Hiero. Only applied second half.' \ + .format(t=otio_item) + + transition = transition_func( + item_out, + otio_item.out_offset.value + ) + + elif transition_type == 'fade_out': + transition_func = getattr( + hiero.core.Transition, + 'create{kind}FadeOutTransition'.format(kind=kind) + ) + transition = transition_func( + item_in, + otio_item.in_offset.value + ) + + # Warn user if part of fade is outside of clip + if otio_item.out_offset.value: + warning = \ + 'Second half of transition "{t.name}" is outside of clip ' \ + 'and not valid in Hiero. Only applied first half.' 
\ + .format(t=otio_item) + + else: + # Unknown transition + return + + # Apply transition to track + if transition: + track.addTransition(transition) + + # Inform user about missing or adjusted transitions + return warning + + +def prep_url(url_in): + url = unquote(url_in) + + if url.startswith('file://localhost/'): + return url + + url = 'file://localhost{sep}{url}'.format( + sep=url.startswith(os.sep) and '' or os.sep, + url=url.startswith(os.sep) and url[1:] or url + ) + + return url + + +def create_offline_mediasource(otio_clip, path=None): + global _otio_old + + hiero_rate = hiero.core.TimeBase( + otio_clip.source_range.start_time.rate + ) + + try: + legal_media_refs = ( + otio.schema.ExternalReference, + otio.schema.ImageSequenceReference + ) + except AttributeError: + _otio_old = True + legal_media_refs = ( + otio.schema.ExternalReference + ) + + if isinstance(otio_clip.media_reference, legal_media_refs): + source_range = otio_clip.available_range() + + else: + source_range = otio_clip.source_range + + if path is None: + path = otio_clip.name + + media = hiero.core.MediaSource.createOfflineVideoMediaSource( + prep_url(path), + source_range.start_time.value, + source_range.duration.value, + hiero_rate, + source_range.start_time.value + ) + + return media + + +def load_otio(otio_file, project=None, sequence=None): + otio_timeline = otio.adapters.read_from_file(otio_file) + build_sequence(otio_timeline, project=project, sequence=sequence) + + +marker_color_map = { + "PINK": "Magenta", + "RED": "Red", + "ORANGE": "Yellow", + "YELLOW": "Yellow", + "GREEN": "Green", + "CYAN": "Cyan", + "BLUE": "Blue", + "PURPLE": "Magenta", + "MAGENTA": "Magenta", + "BLACK": "Blue", + "WHITE": "Green" +} + + +def get_tag(tagname, tagsbin): + for tag in tagsbin.items(): + if tag.name() == tagname: + return tag + + if isinstance(tag, hiero.core.Bin): + tag = get_tag(tagname, tag) + + if tag is not None: + return tag + + return None + + +def add_metadata(metadata, hiero_item): + for key, value in metadata.get('Hiero', dict()).items(): + if key == 'source_type': + # Only used internally to reassign tag to correct Hiero item + continue + + if isinstance(value, dict): + add_metadata(value, hiero_item) + continue + + if value is not None: + if not key.startswith('tag.'): + key = 'tag.' 
+ key + + hiero_item.metadata().setValue(key, str(value)) + + +def add_markers(otio_item, hiero_item, tagsbin): + if isinstance(otio_item, (otio.schema.Stack, otio.schema.Clip)): + markers = otio_item.markers + + elif isinstance(otio_item, otio.schema.Timeline): + markers = otio_item.tracks.markers + + else: + markers = [] + + for marker in markers: + meta = marker.metadata.get('Hiero', dict()) + if 'source_type' in meta: + if hiero_item.__class__.__name__ != meta.get('source_type'): + continue + + marker_color = marker.color + + _tag = get_tag(marker.name, tagsbin) + if _tag is None: + _tag = get_tag(marker_color_map[marker_color], tagsbin) + + if _tag is None: + _tag = hiero.core.Tag(marker_color_map[marker.color]) + + start = marker.marked_range.start_time.value + end = ( + marker.marked_range.start_time.value + + marker.marked_range.duration.value + ) + + if hasattr(hiero_item, 'addTagToRange'): + tag = hiero_item.addTagToRange(_tag, start, end) + + else: + tag = hiero_item.addTag(_tag) + + tag.setName(marker.name or marker_color_map[marker_color]) + # tag.setNote(meta.get('tag.note', '')) + + # Add metadata + add_metadata(marker.metadata, tag) + + +def create_track(otio_track, tracknum, track_kind): + if track_kind is None and hasattr(otio_track, 'kind'): + track_kind = otio_track.kind + + # Create a Track + if track_kind == otio.schema.TrackKind.Video: + track = hiero.core.VideoTrack( + otio_track.name or 'Video{n}'.format(n=tracknum) + ) + + else: + track = hiero.core.AudioTrack( + otio_track.name or 'Audio{n}'.format(n=tracknum) + ) + + return track + + +def create_clip(otio_clip, tagsbin, sequencebin): + # Create MediaSource + url = None + media = None + otio_media = otio_clip.media_reference + + if isinstance(otio_media, otio.schema.ExternalReference): + url = prep_url(otio_media.target_url) + media = hiero.core.MediaSource(url) + + elif not _otio_old: + if isinstance(otio_media, otio.schema.ImageSequenceReference): + url = prep_url(otio_media.abstract_target_url('#')) + media = hiero.core.MediaSource(url) + + if media is None or media.isOffline(): + media = create_offline_mediasource(otio_clip, url) + + # Reuse previous clip if possible + clip = None + for item in sequencebin.clips(): + if item.activeItem().mediaSource() == media: + clip = item.activeItem() + break + + if not clip: + # Create new Clip + clip = hiero.core.Clip(media) + + # Add Clip to a Bin + sequencebin.addItem(hiero.core.BinItem(clip)) + + # Add markers + add_markers(otio_clip, clip, tagsbin) + + return clip + + +def create_trackitem(playhead, track, otio_clip, clip): + source_range = otio_clip.source_range + + trackitem = track.createTrackItem(otio_clip.name) + trackitem.setPlaybackSpeed(source_range.start_time.rate) + trackitem.setSource(clip) + + time_scalar = 1. + + # Check for speed effects and adjust playback speed accordingly + for effect in otio_clip.effects: + if isinstance(effect, otio.schema.LinearTimeWarp): + time_scalar = effect.time_scalar + # Only reverse effect can be applied here + if abs(time_scalar) == 1.: + trackitem.setPlaybackSpeed( + trackitem.playbackSpeed() * time_scalar) + + elif isinstance(effect, otio.schema.FreezeFrame): + # For freeze frame, playback speed must be set after range + time_scalar = 0. 
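The importer above folds OTIO time effects into Hiero playback speed: a `LinearTimeWarp` with scalar magnitude 1 (forward or reverse) is applied before the source range is set, while a `FreezeFrame` scalar of 0 must be applied afterwards. A pure-Python sketch of just that speed bookkeeping, with effects reduced to `(kind, scalar)` tuples for illustration:

```python
def playback_speeds(base_speed, effects):
    """Return (speed applied up front, scalar deferred until after setTimes)."""
    speed = base_speed
    deferred = 1.0
    for kind, scalar in effects:
        if kind == "LinearTimeWarp" and abs(scalar) == 1.0:
            speed *= scalar      # reverse playback handled immediately
        elif kind == "FreezeFrame":
            deferred = 0.0       # freeze frames applied after the range is set
    return speed, deferred

print(playback_speeds(1.0, [("LinearTimeWarp", -1.0)]))  # (-1.0, 1.0)
```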
+ + # If reverse playback speed swap source in and out + if trackitem.playbackSpeed() < 0: + source_out = source_range.start_time.value + source_in = source_range.end_time_inclusive().value + + timeline_in = playhead + source_out + timeline_out = ( + timeline_in + + source_range.duration.value + ) - 1 + else: + # Normal playback speed + source_in = source_range.start_time.value + source_out = source_range.end_time_inclusive().value + + timeline_in = playhead + timeline_out = ( + timeline_in + + source_range.duration.value + ) - 1 + + # Set source and timeline in/out points + trackitem.setTimes( + timeline_in, + timeline_out, + source_in, + source_out + + ) + + # Apply playback speed for freeze frames + if abs(time_scalar) != 1.: + trackitem.setPlaybackSpeed(trackitem.playbackSpeed() * time_scalar) + + # Link audio to video when possible + if isinstance(track, hiero.core.AudioTrack): + for other in track.parent().trackItemsAt(playhead): + if other.source() == clip: + trackitem.link(other) + + return trackitem + + +def build_sequence( + otio_timeline, project=None, sequence=None, track_kind=None): + if project is None: + if sequence: + project = sequence.project() + + else: + # Per version 12.1v2 there is no way of getting active project + project = hiero.core.projects(hiero.core.Project.kUserProjects)[-1] + + projectbin = project.clipsBin() + + if not sequence: + # Create a Sequence + sequence = hiero.core.Sequence(otio_timeline.name or 'OTIOSequence') + + # Set sequence settings from otio timeline if available + if ( + hasattr(otio_timeline, 'global_start_time') + and otio_timeline.global_start_time + ): + start_time = otio_timeline.global_start_time + sequence.setFramerate(start_time.rate) + sequence.setTimecodeStart(start_time.value) + + # Create a Bin to hold clips + projectbin.addItem(hiero.core.BinItem(sequence)) + + sequencebin = hiero.core.Bin(sequence.name()) + projectbin.addItem(sequencebin) + + else: + sequencebin = projectbin + + # Get tagsBin + tagsbin = hiero.core.project("Tag Presets").tagsBin() + + # Add timeline markers + add_markers(otio_timeline, sequence, tagsbin) + + if isinstance(otio_timeline, otio.schema.Timeline): + tracks = otio_timeline.tracks + + else: + tracks = [otio_timeline] + + for tracknum, otio_track in enumerate(tracks): + playhead = 0 + _transitions = [] + + # Add track to sequence + track = create_track(otio_track, tracknum, track_kind) + sequence.addTrack(track) + + # iterate over items in track + for _itemnum, otio_clip in enumerate(otio_track): + if isinstance(otio_clip, (otio.schema.Track, otio.schema.Stack)): + inform('Nested sequences/tracks are created separately.') + + # Add gap where the nested sequence would have been + playhead += otio_clip.source_range.duration.value + + # Process nested sequence + build_sequence( + otio_clip, + project=project, + track_kind=otio_track.kind + ) + + elif isinstance(otio_clip, otio.schema.Clip): + # Create a Clip + clip = create_clip(otio_clip, tagsbin, sequencebin) + + # Create TrackItem + trackitem = create_trackitem( + playhead, + track, + otio_clip, + clip + ) + + # Add markers + add_markers(otio_clip, trackitem, tagsbin) + + # Add trackitem to track + track.addTrackItem(trackitem) + + # Update playhead + playhead = trackitem.timelineOut() + 1 + + elif isinstance(otio_clip, otio.schema.Transition): + # Store transitions for when all clips in the track are created + _transitions.append((otio_track, otio_clip)) + + elif isinstance(otio_clip, otio.schema.Gap): + # Hiero has no fillers, slugs or blanks at the 
moment + playhead += otio_clip.source_range.duration.value + + # Apply transitions we stored earlier now that all clips are present + warnings = [] + for otio_track, otio_item in _transitions: + # Catch warnings form transitions in case + # of unsupported transitions + warning = apply_transition(otio_track, otio_item, track) + if warning: + warnings.append(warning) + + if warnings: + inform(warnings) diff --git a/openpype/hosts/hiero/otio/utils.py b/openpype/hosts/hiero/otio/utils.py new file mode 100644 index 0000000000..f882a5d1f2 --- /dev/null +++ b/openpype/hosts/hiero/otio/utils.py @@ -0,0 +1,76 @@ +import re +import opentimelineio as otio + + +def timecode_to_frames(timecode, framerate): + rt = otio.opentime.from_timecode(timecode, 24) + return int(otio.opentime.to_frames(rt)) + + +def frames_to_timecode(frames, framerate): + rt = otio.opentime.from_frames(frames, framerate) + return otio.opentime.to_timecode(rt) + + +def frames_to_secons(frames, framerate): + rt = otio.opentime.from_frames(frames, framerate) + return otio.opentime.to_seconds(rt) + + +def get_reformated_path(path, padded=True): + """ + Return fixed python expression path + + Args: + path (str): path url or simple file name + + Returns: + type: string with reformated path + + Example: + get_reformated_path("plate.[0001-1008].exr") > plate.%04d.exr + + """ + if "%" in path: + padding_pattern = r"(\d+)" + padding = int(re.findall(padding_pattern, path).pop()) + num_pattern = r"(%\d+d)" + if padded: + path = re.sub(num_pattern, "%0{}d".format(padding), path) + else: + path = re.sub(num_pattern, "%d", path) + return path + + +def get_padding_from_path(path): + """ + Return padding number from DaVinci Resolve sequence path style + + Args: + path (str): path url or simple file name + + Returns: + int: padding number + + Example: + get_padding_from_path("plate.[0001-1008].exr") > 4 + + """ + padding_pattern = "(\\d+)(?=-)" + if "[" in path: + return len(re.findall(padding_pattern, path).pop()) + + return None + + +def get_rate(item): + if not hasattr(item, 'framerate'): + return None + + num, den = item.framerate().toRational() + rate = float(num) / float(den) + + if rate.is_integer(): + return rate + + return round(rate, 4) diff --git a/openpype/hosts/hiero/plugins/create/create_shot_clip.py b/openpype/hosts/hiero/plugins/create/create_shot_clip.py index 07b7a62b2a..25be9f090b 100644 --- a/openpype/hosts/hiero/plugins/create/create_shot_clip.py +++ b/openpype/hosts/hiero/plugins/create/create_shot_clip.py @@ -120,9 +120,9 @@ class CreateShotClip(phiero.Creator): "vSyncTrack": { "value": gui_tracks, # noqa "type": "QComboBox", - "label": "Master track", + "label": "Hero track", "target": "ui", - "toolTip": "Select driving track name which should be mastering all others", # noqa + "toolTip": "Select driving track name which should be hero for all others", # noqa "order": 1} } }, diff --git a/openpype/hosts/hiero/plugins/load/load_clip.py b/openpype/hosts/hiero/plugins/load/load_clip.py index 4eadf28956..9e12fa360e 100644 --- a/openpype/hosts/hiero/plugins/load/load_clip.py +++ b/openpype/hosts/hiero/plugins/load/load_clip.py @@ -29,13 +29,19 @@ class LoadClip(phiero.SequenceLoader): clip_color_last = "green" clip_color = "red" - def load(self, context, name, namespace, options): + clip_name_template = "{asset}_{subset}_{representation}" + def load(self, context, name, namespace, options): + # add clip name template to options + options.update({ + "clipNameTemplate": self.clip_name_template + }) # in case loader uses 
diff --git a/openpype/hosts/hiero/plugins/create/create_shot_clip.py b/openpype/hosts/hiero/plugins/create/create_shot_clip.py
index 07b7a62b2a..25be9f090b 100644
--- a/openpype/hosts/hiero/plugins/create/create_shot_clip.py
+++ b/openpype/hosts/hiero/plugins/create/create_shot_clip.py
@@ -120,9 +120,9 @@ class CreateShotClip(phiero.Creator):
             "vSyncTrack": {
                 "value": gui_tracks,  # noqa
                 "type": "QComboBox",
-                "label": "Master track",
+                "label": "Hero track",
                 "target": "ui",
-                "toolTip": "Select driving track name which should be mastering all others",  # noqa
+                "toolTip": "Select driving track name which should be the hero for all others",  # noqa
                 "order": 1}
         }
     },
diff --git a/openpype/hosts/hiero/plugins/load/load_clip.py b/openpype/hosts/hiero/plugins/load/load_clip.py
index 4eadf28956..9e12fa360e 100644
--- a/openpype/hosts/hiero/plugins/load/load_clip.py
+++ b/openpype/hosts/hiero/plugins/load/load_clip.py
@@ -29,13 +29,19 @@ class LoadClip(phiero.SequenceLoader):
     clip_color_last = "green"
     clip_color = "red"
 
-    def load(self, context, name, namespace, options):
+    clip_name_template = "{asset}_{subset}_{representation}"
 
+    def load(self, context, name, namespace, options):
+        # add clip name template to options
+        options.update({
+            "clipNameTemplate": self.clip_name_template
+        })
         # in case loader uses multiselection
         if self.track and self.sequence:
             options.update({
                 "sequence": self.sequence,
-                "track": self.track
+                "track": self.track,
+                "clipNameTemplate": self.clip_name_template
             })
 
         # load clip to timeline and get main variables
@@ -45,7 +51,8 @@ class LoadClip(phiero.SequenceLoader):
         version_data = version.get("data", {})
         version_name = version.get("name", None)
         colorspace = version_data.get("colorspace", None)
-        object_name = "{}_{}".format(name, namespace)
+        object_name = self.clip_name_template.format(
+            **context["representation"]["context"])
 
         # add additional metadata from the version to imprint Avalon knob
         add_keys = [
diff --git a/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py b/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py
new file mode 100644
index 0000000000..d12e7665bf
--- /dev/null
+++ b/openpype/hosts/hiero/plugins/publish/extract_thumbnail.py
@@ -0,0 +1,59 @@
+import os
+import pyblish.api
+import openpype.api
+
+
+class ExtractThumbnail(openpype.api.Extractor):
+    """
+    Extractor for track item's thumbnails
+    """
+
+    label = "Extract Thumbnail"
+    order = pyblish.api.ExtractorOrder
+    families = ["plate", "take"]
+    hosts = ["hiero"]
+
+    def process(self, instance):
+        # create representation data
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        staging_dir = self.staging_dir(instance)
+
+        self.create_thumbnail(staging_dir, instance)
+
+    def create_thumbnail(self, staging_dir, instance):
+        track_item = instance.data["item"]
+        track_item_name = track_item.name()
+
+        # frames
+        duration = track_item.sourceDuration()
+        frame_start = track_item.sourceIn()
+        self.log.debug(
+            "__ frame_start: `{}`, duration: `{}`".format(
+                frame_start, duration))
+
+        # get thumbnail frame from the middle
+        thumb_frame = int(frame_start + (duration / 2))
+
+        thumb_file = "{}thumbnail{}{}".format(
+            track_item_name, thumb_frame, ".png")
+        thumb_path = os.path.join(staging_dir, thumb_file)
+
+        track_item.thumbnail(thumb_frame).save(
+            thumb_path,
+            format='png'
+        )
+        self.log.debug(
+            "__ thumb_path: `{}`, frame: `{}`".format(thumb_path, thumb_frame))
+
+        self.log.info("Thumbnail was generated at: {}".format(thumb_path))
+        thumb_representation = {
+            'files': thumb_file,
+            'stagingDir': staging_dir,
+            'name': "thumbnail",
+            'thumbnail': True,
+            'ext': "png"
+        }
+        instance.data["representations"].append(
+            thumb_representation)
diff --git a/openpype/hosts/hiero/plugins/publish/version_up_workfile.py b/openpype/hosts/hiero/plugins/publish/integrate_version_up_workfile.py
similarity index 90%
rename from openpype/hosts/hiero/plugins/publish/version_up_workfile.py
rename to openpype/hosts/hiero/plugins/publish/integrate_version_up_workfile.py
index ae03513d78..934e7112fa 100644
--- a/openpype/hosts/hiero/plugins/publish/version_up_workfile.py
+++ b/openpype/hosts/hiero/plugins/publish/integrate_version_up_workfile.py
@@ -2,7 +2,7 @@ from pyblish import api
 import openpype.api as pype
 
 
-class VersionUpWorkfile(api.ContextPlugin):
+class IntegrateVersionUpWorkfile(api.ContextPlugin):
     """Save as new workfile version"""
 
     order = api.IntegratorOrder + 10.1
diff --git a/openpype/hosts/hiero/plugins/publish/precollect_instances.py b/openpype/hosts/hiero/plugins/publish/precollect_instances.py
index bdf007de06..a1dee711b7 100644
--- a/openpype/hosts/hiero/plugins/publish/precollect_instances.py
+++ b/openpype/hosts/hiero/plugins/publish/precollect_instances.py
@@ -1,221 +1,204 @@
-from compiler.ast import flatten
-from pyblish import api
+import
pyblish +import openpype from openpype.hosts.hiero import api as phiero -import hiero -# from openpype.hosts.hiero.api import lib -# reload(lib) -# reload(phiero) +from openpype.hosts.hiero.otio import hiero_export + +# # developer reload modules +from pprint import pformat -class PreCollectInstances(api.ContextPlugin): +class PrecollectInstances(pyblish.api.ContextPlugin): """Collect all Track items selection.""" - order = api.CollectorOrder - 0.509 - label = "Pre-collect Instances" + order = pyblish.api.CollectorOrder - 0.59 + label = "Precollect Instances" hosts = ["hiero"] def process(self, context): - track_items = phiero.get_track_items( - selected=True, check_tagged=True, check_enabled=True) - # only return enabled track items - if not track_items: - track_items = phiero.get_track_items( - check_enabled=True, check_tagged=True) - # get sequence and video tracks - sequence = context.data["activeSequence"] - tracks = sequence.videoTracks() - - # add collection to context - tracks_effect_items = self.collect_sub_track_items(tracks) - - context.data["tracksEffectItems"] = tracks_effect_items - + otio_timeline = context.data["otioTimeline"] + selected_timeline_items = phiero.get_track_items( + selected=True, check_enabled=True, check_tagged=True) self.log.info( - "Processing enabled track items: {}".format(len(track_items))) + "Processing enabled track items: {}".format( + selected_timeline_items)) + + for track_item in selected_timeline_items: - for _ti in track_items: data = dict() - clip = _ti.source() + clip_name = track_item.name() - # get clips subtracks and anotations - annotations = self.clip_annotations(clip) - subtracks = self.clip_subtrack(_ti) - self.log.debug("Annotations: {}".format(annotations)) - self.log.debug(">> Subtracks: {}".format(subtracks)) + # get openpype tag data + tag_data = phiero.get_track_item_pype_data(track_item) + self.log.debug("__ tag_data: {}".format(pformat(tag_data))) - # get pype tag data - tag_parsed_data = phiero.get_track_item_pype_data(_ti) - # self.log.debug(pformat(tag_parsed_data)) - - if not tag_parsed_data: + if not tag_data: continue - if tag_parsed_data.get("id") != "pyblish.avalon.instance": + if tag_data.get("id") != "pyblish.avalon.instance": continue + + # solve handles length + tag_data["handleStart"] = min( + tag_data["handleStart"], int(track_item.handleInLength())) + tag_data["handleEnd"] = min( + tag_data["handleEnd"], int(track_item.handleOutLength())) + # add tag data to instance data data.update({ - k: v for k, v in tag_parsed_data.items() + k: v for k, v in tag_data.items() if k not in ("id", "applieswhole", "label") }) - asset = tag_parsed_data["asset"] - subset = tag_parsed_data["subset"] - review = tag_parsed_data.get("review") - audio = tag_parsed_data.get("audio") - - # remove audio attribute from data - data.pop("audio") + asset = tag_data["asset"] + subset = tag_data["subset"] # insert family into families - family = tag_parsed_data["family"] - families = [str(f) for f in tag_parsed_data["families"]] + family = tag_data["family"] + families = [str(f) for f in tag_data["families"]] families.insert(0, str(family)) - track = _ti.parent() - media_source = _ti.source().mediaSource() - source_path = media_source.firstpath() - file_head = media_source.filenameHead() - file_info = media_source.fileinfos().pop() - source_first_frame = int(file_info.startFrame()) - - # apply only for feview and master track instance - if review: - families += ["review", "ftrack"] + # form label + label = asset + if asset != clip_name: + label 
+= " ({})".format(clip_name) + label += " {}".format(subset) + label += " {}".format("[" + ", ".join(families) + "]") data.update({ - "name": "{} {} {}".format(asset, subset, families), + "name": "{}_{}".format(asset, subset), + "label": label, "asset": asset, - "item": _ti, + "item": track_item, "families": families, - - # tags - "tags": _ti.tags(), - - # track item attributes - "track": track.name(), - "trackItem": track, - - # version data - "versionData": { - "colorspace": _ti.sourceMediaColourTransform() - }, - - # source attribute - "source": source_path, - "sourceMedia": media_source, - "sourcePath": source_path, - "sourceFileHead": file_head, - "sourceFirst": source_first_frame, - - # clip's effect - "clipEffectItems": subtracks + "publish": tag_data["publish"], + "fps": context.data["fps"] }) + # otio clip data + otio_data = self.get_otio_clip_instance_data( + otio_timeline, track_item) or {} + self.log.debug("__ otio_data: {}".format(pformat(otio_data))) + data.update(otio_data) + self.log.debug("__ data: {}".format(pformat(data))) + + # add resolution + self.get_resolution_to_data(data, context) + + # create instance instance = context.create_instance(**data) + # create shot instance for shot attributes create/update + self.create_shot_instance(context, **data) + self.log.info("Creating instance: {}".format(instance)) + self.log.debug( + "_ instance.data: {}".format(pformat(instance.data))) - if audio: - a_data = dict() + def get_resolution_to_data(self, data, context): + assert data.get("otioClip"), "Missing `otioClip` data" - # add tag data to instance data - a_data.update({ - k: v for k, v in tag_parsed_data.items() - if k not in ("id", "applieswhole", "label") - }) + # solve source resolution option + if data.get("sourceResolution", None): + otio_clip_metadata = data[ + "otioClip"].media_reference.metadata + data.update({ + "resolutionWidth": otio_clip_metadata[ + "openpype.source.width"], + "resolutionHeight": otio_clip_metadata[ + "openpype.source.height"], + "pixelAspect": otio_clip_metadata[ + "openpype.source.pixelAspect"] + }) + else: + otio_tl_metadata = context.data["otioTimeline"].metadata + data.update({ + "resolutionWidth": otio_tl_metadata["openpype.timeline.width"], + "resolutionHeight": otio_tl_metadata[ + "openpype.timeline.height"], + "pixelAspect": otio_tl_metadata[ + "openpype.timeline.pixelAspect"] + }) - # create main attributes - subset = "audioMain" - family = "audio" - families = ["clip", "ftrack"] - families.insert(0, str(family)) + def create_shot_instance(self, context, **data): + master_layer = data.get("heroTrack") + hierarchy_data = data.get("hierarchyData") + asset = data.get("asset") + item = data.get("item") + clip_name = item.name() - name = "{} {} {}".format(asset, subset, families) + if not master_layer: + return - a_data.update({ - "name": name, - "subset": subset, - "asset": asset, - "family": family, - "families": families, - "item": _ti, + if not hierarchy_data: + return - # tags - "tags": _ti.tags(), - }) + asset = data["asset"] + subset = "shotMain" - a_instance = context.create_instance(**a_data) - self.log.info("Creating audio instance: {}".format(a_instance)) + # insert family into families + family = "shot" + + # form label + label = asset + if asset != clip_name: + label += " ({}) ".format(clip_name) + label += " {}".format(subset) + label += " [{}]".format(family) + + data.update({ + "name": "{}_{}".format(asset, subset), + "label": label, + "subset": subset, + "asset": asset, + "family": family, + "families": [] + }) + + 
instance = context.create_instance(**data) + self.log.info("Creating instance: {}".format(instance)) + self.log.debug( + "_ instance.data: {}".format(pformat(instance.data))) + + def get_otio_clip_instance_data(self, otio_timeline, track_item): + """ + Return otio objects for timeline, track and clip + + Args: + timeline_item_data (dict): timeline_item_data from list returned by + resolve.get_current_timeline_items() + otio_timeline (otio.schema.Timeline): otio object + + Returns: + dict: otio clip object + + """ + ti_track_name = track_item.parent().name() + timeline_range = self.create_otio_time_range_from_timeline_item_data( + track_item) + for otio_clip in otio_timeline.each_clip(): + track_name = otio_clip.parent().name + parent_range = otio_clip.range_in_parent() + if ti_track_name not in track_name: + continue + if otio_clip.name not in track_item.name(): + continue + if openpype.lib.is_overlapping_otio_ranges( + parent_range, timeline_range, strict=True): + + # add pypedata marker to otio_clip metadata + for marker in otio_clip.markers: + if phiero.pype_tag_name in marker.name: + otio_clip.metadata.update(marker.metadata) + return {"otioClip": otio_clip} + + return None @staticmethod - def clip_annotations(clip): - """ - Returns list of Clip's hiero.core.Annotation - """ - annotations = [] - subTrackItems = flatten(clip.subTrackItems()) - annotations += [item for item in subTrackItems if isinstance( - item, hiero.core.Annotation)] - return annotations + def create_otio_time_range_from_timeline_item_data(track_item): + timeline = phiero.get_current_sequence() + frame_start = int(track_item.timelineIn()) + frame_duration = int(track_item.sourceDuration()) + fps = timeline.framerate().toFloat() - @staticmethod - def clip_subtrack(clip): - """ - Returns list of Clip's hiero.core.SubTrackItem - """ - subtracks = [] - subTrackItems = flatten(clip.parent().subTrackItems()) - for item in subTrackItems: - # avoid all anotation - if isinstance(item, hiero.core.Annotation): - continue - # # avoid all not anaibled - if not item.isEnabled(): - continue - subtracks.append(item) - return subtracks - - @staticmethod - def collect_sub_track_items(tracks): - """ - Returns dictionary with track index as key and list of subtracks - """ - # collect all subtrack items - sub_track_items = dict() - for track in tracks: - items = track.items() - - # skip if no clips on track > need track with effect only - if items: - continue - - # skip all disabled tracks - if not track.isEnabled(): - continue - - track_index = track.trackIndex() - _sub_track_items = flatten(track.subTrackItems()) - - # continue only if any subtrack items are collected - if len(_sub_track_items) < 1: - continue - - enabled_sti = list() - # loop all found subtrack items and check if they are enabled - for _sti in _sub_track_items: - # checking if not enabled - if not _sti.isEnabled(): - continue - if isinstance(_sti, hiero.core.Annotation): - continue - # collect the subtrack item - enabled_sti.append(_sti) - - # continue only if any subtrack items are collected - if len(enabled_sti) < 1: - continue - - # add collection of subtrackitems to dict - sub_track_items[track_index] = enabled_sti - - return sub_track_items + return hiero_export.create_otio_time_range( + frame_start, frame_duration, fps) diff --git a/openpype/hosts/hiero/plugins/publish/precollect_workfile.py b/openpype/hosts/hiero/plugins/publish/precollect_workfile.py index ef7d07421b..bc4ef7e150 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_workfile.py +++ 
b/openpype/hosts/hiero/plugins/publish/precollect_workfile.py
@@ -1,52 +1,57 @@
 import os
 import pyblish.api
+import hiero.ui
 from openpype.hosts.hiero import api as phiero
 from avalon import api as avalon
+from pprint import pformat
+from openpype.hosts.hiero.otio import hiero_export
+from Qt.QtGui import QPixmap
+import tempfile
-
-class PreCollectWorkfile(pyblish.api.ContextPlugin):
+
+class PrecollectWorkfile(pyblish.api.ContextPlugin):
     """Inject the current working file into context"""
 
-    label = "Pre-collect Workfile"
-    order = pyblish.api.CollectorOrder - 0.51
+    label = "Precollect Workfile"
+    order = pyblish.api.CollectorOrder - 0.6
 
     def process(self, context):
+
         asset = avalon.Session["AVALON_ASSET"]
         subset = "workfile"
-        project = phiero.get_current_project()
-        active_sequence = phiero.get_current_sequence()
-        video_tracks = active_sequence.videoTracks()
-        audio_tracks = active_sequence.audioTracks()
-        current_file = project.path()
-        staging_dir = os.path.dirname(current_file)
-        base_name = os.path.basename(current_file)
+        # project is still needed for the workfile path and instance item
+        project = phiero.get_current_project()
+        active_timeline = hiero.ui.activeSequence()
+        fps = active_timeline.framerate().toFloat()
 
-        # get workfile's colorspace properties
-        _clrs = {}
-        _clrs["useOCIOEnvironmentOverride"] = project.useOCIOEnvironmentOverride()  # noqa
-        _clrs["lutSetting16Bit"] = project.lutSetting16Bit()
-        _clrs["lutSetting8Bit"] = project.lutSetting8Bit()
-        _clrs["lutSettingFloat"] = project.lutSettingFloat()
-        _clrs["lutSettingLog"] = project.lutSettingLog()
-        _clrs["lutSettingViewer"] = project.lutSettingViewer()
-        _clrs["lutSettingWorkingSpace"] = project.lutSettingWorkingSpace()
-        _clrs["lutUseOCIOForExport"] = project.lutUseOCIOForExport()
-        _clrs["ocioConfigName"] = project.ocioConfigName()
-        _clrs["ocioConfigPath"] = project.ocioConfigPath()
+        # adding otio timeline to context
+        otio_timeline = hiero_export.create_otio_timeline()
 
-        # set main project attributes to context
-        context.data["activeProject"] = project
-        context.data["activeSequence"] = active_sequence
-        context.data["videoTracks"] = video_tracks
-        context.data["audioTracks"] = audio_tracks
-        context.data["currentFile"] = current_file
-        context.data["colorspace"] = _clrs
+        # get workfile thumbnail paths
+        tmp_staging = tempfile.mkdtemp(prefix="pyblish_tmp_")
+        thumbnail_name = "workfile_thumbnail.png"
+        thumbnail_path = os.path.join(tmp_staging, thumbnail_name)
 
-        self.log.info("currentFile: {}".format(current_file))
+        # search for all windows with name of actual sequence
+        _windows = [w for w in hiero.ui.windowManager().windows()
+                    if active_timeline.name() in w.windowTitle()]
+
+        # export window to thumb path
+        QPixmap.grabWidget(_windows[-1]).save(thumbnail_path, 'png')
+
+        # thumbnail
+        thumb_representation = {
+            'files': thumbnail_name,
+            'stagingDir': tmp_staging,
+            'name': "thumbnail",
+            'thumbnail': True,
+            'ext': "png"
+        }
+
+        # get workfile paths
+        current_file = project.path()
+        staging_dir, base_name = os.path.split(current_file)
 
         # creating workfile representation
-        representation = {
+        workfile_representation = {
             'name': 'hrox',
             'ext': 'hrox',
             'files': base_name,
@@ -59,16 +64,21 @@ class PreCollectWorkfile(pyblish.api.ContextPlugin):
             "subset": "{}{}".format(asset, subset.capitalize()),
             "item": project,
             "family": "workfile",
-
-            # version data
-            "versionData": {
-                "colorspace": _clrs
-            },
-
-            # source attribute
-            "sourcePath": current_file,
-            "representations": [representation]
+            "representations": [workfile_representation, thumb_representation]
         }
 
+        # create instance with workfile
         instance = context.create_instance(**instance_data)
+
+        # update context with main project attributes
+        context_data = {
+            "activeProject": project,
+            "otioTimeline": otio_timeline,
+            "currentFile": current_file,
+            "fps": fps,
+        }
+        context.data.update(context_data)
+
        self.log.info("Creating instance: {}".format(instance))
+        self.log.debug("__ instance.data: {}".format(pformat(instance.data)))
+        self.log.debug("__ context_data: {}".format(pformat(context_data)))
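Note that `QPixmap.grabWidget()` used above is a deprecated Qt 4-era static method; in builds where it is unavailable, `QWidget.grab()` is the Qt 5 replacement and works with the PySide2 that Hiero ships. A hedged, self-contained sketch of the same thumbnail capture (the function name is illustrative, and it must run inside Hiero):

```python
import os
import tempfile

import hiero.ui


def grab_timeline_thumbnail():
    """Save a PNG screenshot of the active sequence's editor window."""
    active_timeline = hiero.ui.activeSequence()
    windows = [
        w for w in hiero.ui.windowManager().windows()
        if active_timeline.name() in w.windowTitle()
    ]
    if not windows:
        return None

    staging = tempfile.mkdtemp(prefix="pyblish_tmp_")
    path = os.path.join(staging, "workfile_thumbnail.png")
    # QWidget.grab() renders the widget into a QPixmap (Qt 5 API)
    windows[-1].grab().save(path, "png")
    return path
```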
diff --git a/openpype/hosts/hiero/plugins/publish/collect_assetbuilds.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py
similarity index 100%
rename from openpype/hosts/hiero/plugins/publish/collect_assetbuilds.py
rename to openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py
diff --git a/openpype/hosts/hiero/plugins/publish/collect_clip_resolution.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_clip_resolution.py
similarity index 100%
rename from openpype/hosts/hiero/plugins/publish/collect_clip_resolution.py
rename to openpype/hosts/hiero/plugins/publish_old_workflow/collect_clip_resolution.py
diff --git a/openpype/hosts/hiero/plugins/publish/collect_frame_ranges.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_frame_ranges.py
similarity index 97%
rename from openpype/hosts/hiero/plugins/publish/collect_frame_ranges.py
rename to openpype/hosts/hiero/plugins/publish_old_workflow/collect_frame_ranges.py
index 39387578d2..21e12e89fa 100644
--- a/openpype/hosts/hiero/plugins/publish/collect_frame_ranges.py
+++ b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_frame_ranges.py
@@ -5,7 +5,7 @@ class CollectFrameRanges(pyblish.api.InstancePlugin):
     """ Collect all framranges.
     """
-    order = pyblish.api.CollectorOrder
+    order = pyblish.api.CollectorOrder - 0.1
     label = "Collect Frame Ranges"
     hosts = ["hiero"]
     families = ["clip", "effect"]
diff --git a/openpype/hosts/hiero/plugins/publish/collect_hierarchy_context.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_hierarchy_context.py
similarity index 97%
rename from openpype/hosts/hiero/plugins/publish/collect_hierarchy_context.py
rename to openpype/hosts/hiero/plugins/publish_old_workflow/collect_hierarchy_context.py
index ba3e388c53..0696a58e39 100644
--- a/openpype/hosts/hiero/plugins/publish/collect_hierarchy_context.py
+++ b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_hierarchy_context.py
@@ -39,8 +39,8 @@ class CollectHierarchy(pyblish.api.ContextPlugin):
             if not set(self.families).intersection(families):
                 continue
 
-            # exclude if not masterLayer True
-            if not instance.data.get("masterLayer"):
+            # exclude if heroTrack is not set
+            if not instance.data.get("heroTrack"):
                 continue
 
             # update families to include `shot` for hierarchy integration
diff --git a/openpype/hosts/hiero/plugins/publish/collect_host_version.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_host_version.py
similarity index 100%
rename from openpype/hosts/hiero/plugins/publish/collect_host_version.py
rename to openpype/hosts/hiero/plugins/publish_old_workflow/collect_host_version.py
diff --git a/openpype/hosts/hiero/plugins/publish/collect_plates.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_plates.py
similarity index 100%
rename from openpype/hosts/hiero/plugins/publish/collect_plates.py
rename to openpype/hosts/hiero/plugins/publish_old_workflow/collect_plates.py
diff --git a/openpype/hosts/hiero/plugins/publish/collect_review.py
b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_review.py similarity index 99% rename from openpype/hosts/hiero/plugins/publish/collect_review.py rename to openpype/hosts/hiero/plugins/publish_old_workflow/collect_review.py index a0ab00b355..b1d97a71d7 100644 --- a/openpype/hosts/hiero/plugins/publish/collect_review.py +++ b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_review.py @@ -29,7 +29,7 @@ class CollectReview(api.InstancePlugin): Exception: description """ - review_track = instance.data.get("review") + review_track = instance.data.get("reviewTrack") video_tracks = instance.context.data["videoTracks"] for track in video_tracks: if review_track not in track.name(): diff --git a/openpype/hosts/hiero/plugins/publish/collect_tag_tasks.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_tag_tasks.py similarity index 100% rename from openpype/hosts/hiero/plugins/publish/collect_tag_tasks.py rename to openpype/hosts/hiero/plugins/publish_old_workflow/collect_tag_tasks.py diff --git a/openpype/hosts/hiero/plugins/publish/extract_audio.py b/openpype/hosts/hiero/plugins/publish_old_workflow/extract_audio.py similarity index 100% rename from openpype/hosts/hiero/plugins/publish/extract_audio.py rename to openpype/hosts/hiero/plugins/publish_old_workflow/extract_audio.py diff --git a/openpype/hosts/hiero/plugins/publish/extract_clip_effects.py b/openpype/hosts/hiero/plugins/publish_old_workflow/extract_clip_effects.py similarity index 100% rename from openpype/hosts/hiero/plugins/publish/extract_clip_effects.py rename to openpype/hosts/hiero/plugins/publish_old_workflow/extract_clip_effects.py diff --git a/openpype/hosts/hiero/plugins/publish/extract_review_preparation.py b/openpype/hosts/hiero/plugins/publish_old_workflow/extract_review_preparation.py similarity index 98% rename from openpype/hosts/hiero/plugins/publish/extract_review_preparation.py rename to openpype/hosts/hiero/plugins/publish_old_workflow/extract_review_preparation.py index 5456ddc3c4..aac476e27a 100644 --- a/openpype/hosts/hiero/plugins/publish/extract_review_preparation.py +++ b/openpype/hosts/hiero/plugins/publish_old_workflow/extract_review_preparation.py @@ -132,7 +132,7 @@ class ExtractReviewPreparation(openpype.api.Extractor): ).format(**locals()) self.log.debug("ffprob_cmd: {}".format(ffprob_cmd)) - audio_check_output = openpype.api.subprocess(ffprob_cmd) + audio_check_output = openpype.api.run_subprocess(ffprob_cmd) self.log.debug( "audio_check_output: {}".format(audio_check_output)) @@ -167,7 +167,7 @@ class ExtractReviewPreparation(openpype.api.Extractor): # try to get video native resolution data try: - resolution_output = openpype.api.subprocess(( + resolution_output = openpype.api.run_subprocess(( "\"{ffprobe_path}\" -i \"{full_input_path}\"" " -v error " "-select_streams v:0 -show_entries " @@ -280,7 +280,7 @@ class ExtractReviewPreparation(openpype.api.Extractor): # run subprocess self.log.debug("Executing: {}".format(subprcs_cmd)) - output = openpype.api.subprocess(subprcs_cmd) + output = openpype.api.run_subprocess(subprcs_cmd) self.log.debug("Output: {}".format(output)) repre_new = { diff --git a/openpype/hosts/hiero/plugins/publish/precollect_clip_effects.py b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_clip_effects.py similarity index 100% rename from openpype/hosts/hiero/plugins/publish/precollect_clip_effects.py rename to openpype/hosts/hiero/plugins/publish_old_workflow/precollect_clip_effects.py diff --git 
a/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_instances.py b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_instances.py new file mode 100644 index 0000000000..f9cc158e79 --- /dev/null +++ b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_instances.py @@ -0,0 +1,223 @@ +from compiler.ast import flatten +from pyblish import api +from openpype.hosts.hiero import api as phiero +import hiero +# from openpype.hosts.hiero.api import lib +# reload(lib) +# reload(phiero) + + +class PreCollectInstances(api.ContextPlugin): + """Collect all Track items selection.""" + + order = api.CollectorOrder - 0.509 + label = "Pre-collect Instances" + hosts = ["hiero"] + + def process(self, context): + track_items = phiero.get_track_items( + selected=True, check_tagged=True, check_enabled=True) + # only return enabled track items + if not track_items: + track_items = phiero.get_track_items( + check_enabled=True, check_tagged=True) + # get sequence and video tracks + sequence = context.data["activeSequence"] + tracks = sequence.videoTracks() + + # add collection to context + tracks_effect_items = self.collect_sub_track_items(tracks) + + context.data["tracksEffectItems"] = tracks_effect_items + + self.log.info( + "Processing enabled track items: {}".format(len(track_items))) + + for _ti in track_items: + data = {} + clip = _ti.source() + + # get clips subtracks and anotations + annotations = self.clip_annotations(clip) + subtracks = self.clip_subtrack(_ti) + self.log.debug("Annotations: {}".format(annotations)) + self.log.debug(">> Subtracks: {}".format(subtracks)) + + # get pype tag data + tag_parsed_data = phiero.get_track_item_pype_data(_ti) + # self.log.debug(pformat(tag_parsed_data)) + + if not tag_parsed_data: + continue + + if tag_parsed_data.get("id") != "pyblish.avalon.instance": + continue + # add tag data to instance data + data.update({ + k: v for k, v in tag_parsed_data.items() + if k not in ("id", "applieswhole", "label") + }) + + asset = tag_parsed_data["asset"] + subset = tag_parsed_data["subset"] + review_track = tag_parsed_data.get("reviewTrack") + hiero_track = tag_parsed_data.get("heroTrack") + audio = tag_parsed_data.get("audio") + + # remove audio attribute from data + data.pop("audio") + + # insert family into families + family = tag_parsed_data["family"] + families = [str(f) for f in tag_parsed_data["families"]] + families.insert(0, str(family)) + + track = _ti.parent() + media_source = _ti.source().mediaSource() + source_path = media_source.firstpath() + file_head = media_source.filenameHead() + file_info = media_source.fileinfos().pop() + source_first_frame = int(file_info.startFrame()) + + # apply only for review and master track instance + if review_track and hiero_track: + families += ["review", "ftrack"] + + data.update({ + "name": "{} {} {}".format(asset, subset, families), + "asset": asset, + "item": _ti, + "families": families, + + # tags + "tags": _ti.tags(), + + # track item attributes + "track": track.name(), + "trackItem": track, + "reviewTrack": review_track, + + # version data + "versionData": { + "colorspace": _ti.sourceMediaColourTransform() + }, + + # source attribute + "source": source_path, + "sourceMedia": media_source, + "sourcePath": source_path, + "sourceFileHead": file_head, + "sourceFirst": source_first_frame, + + # clip's effect + "clipEffectItems": subtracks + }) + + instance = context.create_instance(**data) + + self.log.info("Creating instance.data: {}".format(instance.data)) + + if audio: + a_data = dict() + + # add 
tag data to instance data + a_data.update({ + k: v for k, v in tag_parsed_data.items() + if k not in ("id", "applieswhole", "label") + }) + + # create main attributes + subset = "audioMain" + family = "audio" + families = ["clip", "ftrack"] + families.insert(0, str(family)) + + name = "{} {} {}".format(asset, subset, families) + + a_data.update({ + "name": name, + "subset": subset, + "asset": asset, + "family": family, + "families": families, + "item": _ti, + + # tags + "tags": _ti.tags(), + }) + + a_instance = context.create_instance(**a_data) + self.log.info("Creating audio instance: {}".format(a_instance)) + + @staticmethod + def clip_annotations(clip): + """ + Returns list of Clip's hiero.core.Annotation + """ + annotations = [] + subTrackItems = flatten(clip.subTrackItems()) + annotations += [item for item in subTrackItems if isinstance( + item, hiero.core.Annotation)] + return annotations + + @staticmethod + def clip_subtrack(clip): + """ + Returns list of Clip's hiero.core.SubTrackItem + """ + subtracks = [] + subTrackItems = flatten(clip.parent().subTrackItems()) + for item in subTrackItems: + # avoid all anotation + if isinstance(item, hiero.core.Annotation): + continue + # # avoid all not anaibled + if not item.isEnabled(): + continue + subtracks.append(item) + return subtracks + + @staticmethod + def collect_sub_track_items(tracks): + """ + Returns dictionary with track index as key and list of subtracks + """ + # collect all subtrack items + sub_track_items = dict() + for track in tracks: + items = track.items() + + # skip if no clips on track > need track with effect only + if items: + continue + + # skip all disabled tracks + if not track.isEnabled(): + continue + + track_index = track.trackIndex() + _sub_track_items = flatten(track.subTrackItems()) + + # continue only if any subtrack items are collected + if len(_sub_track_items) < 1: + continue + + enabled_sti = list() + # loop all found subtrack items and check if they are enabled + for _sti in _sub_track_items: + # checking if not enabled + if not _sti.isEnabled(): + continue + if isinstance(_sti, hiero.core.Annotation): + continue + # collect the subtrack item + enabled_sti.append(_sti) + + # continue only if any subtrack items are collected + if len(enabled_sti) < 1: + continue + + # add collection of subtrackitems to dict + sub_track_items[track_index] = enabled_sti + + return sub_track_items diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py new file mode 100644 index 0000000000..ef7d07421b --- /dev/null +++ b/openpype/hosts/hiero/plugins/publish_old_workflow/precollect_workfile.py @@ -0,0 +1,74 @@ +import os +import pyblish.api +from openpype.hosts.hiero import api as phiero +from avalon import api as avalon + + +class PreCollectWorkfile(pyblish.api.ContextPlugin): + """Inject the current working file into context""" + + label = "Pre-collect Workfile" + order = pyblish.api.CollectorOrder - 0.51 + + def process(self, context): + asset = avalon.Session["AVALON_ASSET"] + subset = "workfile" + + project = phiero.get_current_project() + active_sequence = phiero.get_current_sequence() + video_tracks = active_sequence.videoTracks() + audio_tracks = active_sequence.audioTracks() + current_file = project.path() + staging_dir = os.path.dirname(current_file) + base_name = os.path.basename(current_file) + + # get workfile's colorspace properties + _clrs = {} + _clrs["useOCIOEnvironmentOverride"] = 
project.useOCIOEnvironmentOverride() # noqa + _clrs["lutSetting16Bit"] = project.lutSetting16Bit() + _clrs["lutSetting8Bit"] = project.lutSetting8Bit() + _clrs["lutSettingFloat"] = project.lutSettingFloat() + _clrs["lutSettingLog"] = project.lutSettingLog() + _clrs["lutSettingViewer"] = project.lutSettingViewer() + _clrs["lutSettingWorkingSpace"] = project.lutSettingWorkingSpace() + _clrs["lutUseOCIOForExport"] = project.lutUseOCIOForExport() + _clrs["ocioConfigName"] = project.ocioConfigName() + _clrs["ocioConfigPath"] = project.ocioConfigPath() + + # set main project attributes to context + context.data["activeProject"] = project + context.data["activeSequence"] = active_sequence + context.data["videoTracks"] = video_tracks + context.data["audioTracks"] = audio_tracks + context.data["currentFile"] = current_file + context.data["colorspace"] = _clrs + + self.log.info("currentFile: {}".format(current_file)) + + # creating workfile representation + representation = { + 'name': 'hrox', + 'ext': 'hrox', + 'files': base_name, + "stagingDir": staging_dir, + } + + instance_data = { + "name": "{}_{}".format(asset, subset), + "asset": asset, + "subset": "{}{}".format(asset, subset.capitalize()), + "item": project, + "family": "workfile", + + # version data + "versionData": { + "colorspace": _clrs + }, + + # source attribute + "sourcePath": current_file, + "representations": [representation] + } + + instance = context.create_instance(**instance_data) + self.log.info("Creating instance: {}".format(instance)) diff --git a/openpype/hosts/hiero/plugins/publish/validate_audio.py b/openpype/hosts/hiero/plugins/publish_old_workflow/validate_audio.py similarity index 100% rename from openpype/hosts/hiero/plugins/publish/validate_audio.py rename to openpype/hosts/hiero/plugins/publish_old_workflow/validate_audio.py diff --git a/openpype/hosts/hiero/plugins/publish/validate_hierarchy.py b/openpype/hosts/hiero/plugins/publish_old_workflow/validate_hierarchy.py similarity index 100% rename from openpype/hosts/hiero/plugins/publish/validate_hierarchy.py rename to openpype/hosts/hiero/plugins/publish_old_workflow/validate_hierarchy.py diff --git a/openpype/hosts/hiero/plugins/publish/validate_names.py b/openpype/hosts/hiero/plugins/publish_old_workflow/validate_names.py similarity index 100% rename from openpype/hosts/hiero/plugins/publish/validate_names.py rename to openpype/hosts/hiero/plugins/publish_old_workflow/validate_names.py diff --git a/openpype/hosts/hiero/startup/Python/Startup/Startup.py b/openpype/hosts/hiero/startup/Python/Startup/Startup.py index 8de2dc2d11..21c21cd7c3 100644 --- a/openpype/hosts/hiero/startup/Python/Startup/Startup.py +++ b/openpype/hosts/hiero/startup/Python/Startup/Startup.py @@ -6,7 +6,7 @@ import openpype.hosts.hiero.api as phiero avalon.api.install(phiero) try: - __import__("pype.hosts.hiero.api") + __import__("openpype.hosts.hiero.api") __import__("pyblish") except ImportError as e: diff --git a/openpype/hosts/hiero/startup/Python/Startup/otioexporter/OTIOExportTask.py b/openpype/hosts/hiero/startup/Python/Startup/otioexporter/OTIOExportTask.py index 90504ccd18..7e1a8df2dc 100644 --- a/openpype/hosts/hiero/startup/Python/Startup/otioexporter/OTIOExportTask.py +++ b/openpype/hosts/hiero/startup/Python/Startup/otioexporter/OTIOExportTask.py @@ -1,338 +1,28 @@ -# MIT License -# -# Copyright (c) 2018 Daniel Flehner Heen (Storm Studios) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the 
"Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. +#!/usr/bin/env python +# -*- coding: utf-8 -*- +__author__ = "Daniel Flehner Heen" +__credits__ = ["Jakub Jezek", "Daniel Flehner Heen"] import os -import re import hiero.core from hiero.core import util import opentimelineio as otio - - -marker_color_map = { - "magenta": otio.schema.MarkerColor.MAGENTA, - "red": otio.schema.MarkerColor.RED, - "yellow": otio.schema.MarkerColor.YELLOW, - "green": otio.schema.MarkerColor.GREEN, - "cyan": otio.schema.MarkerColor.CYAN, - "blue": otio.schema.MarkerColor.BLUE, -} - +from openpype.hosts.hiero.otio import hiero_export class OTIOExportTask(hiero.core.TaskBase): def __init__(self, initDict): """Initialize""" hiero.core.TaskBase.__init__(self, initDict) + self.otio_timeline = None def name(self): return str(type(self)) - def get_rate(self, item): - if not hasattr(item, 'framerate'): - item = item.sequence() - - num, den = item.framerate().toRational() - rate = float(num) / float(den) - - if rate.is_integer(): - return rate - - return round(rate, 2) - - def get_clip_ranges(self, trackitem): - # Get rate from source or sequence - if trackitem.source().mediaSource().hasVideo(): - rate_item = trackitem.source() - - else: - rate_item = trackitem.sequence() - - source_rate = self.get_rate(rate_item) - - # Reversed video/audio - if trackitem.playbackSpeed() < 0: - start = trackitem.sourceOut() - - else: - start = trackitem.sourceIn() - - source_start_time = otio.opentime.RationalTime( - start, - source_rate - ) - source_duration = otio.opentime.RationalTime( - trackitem.duration(), - source_rate - ) - - source_range = otio.opentime.TimeRange( - start_time=source_start_time, - duration=source_duration - ) - - hiero_clip = trackitem.source() - - available_range = None - if hiero_clip.mediaSource().isMediaPresent(): - start_time = otio.opentime.RationalTime( - hiero_clip.mediaSource().startTime(), - source_rate - ) - duration = otio.opentime.RationalTime( - hiero_clip.mediaSource().duration(), - source_rate - ) - available_range = otio.opentime.TimeRange( - start_time=start_time, - duration=duration - ) - - return source_range, available_range - - def add_gap(self, trackitem, otio_track, prev_out): - gap_length = trackitem.timelineIn() - prev_out - if prev_out != 0: - gap_length -= 1 - - rate = self.get_rate(trackitem.sequence()) - gap = otio.opentime.TimeRange( - duration=otio.opentime.RationalTime( - gap_length, - rate - ) - ) - otio_gap = otio.schema.Gap(source_range=gap) - otio_track.append(otio_gap) - - def get_marker_color(self, tag): - icon = tag.icon() - pat = r'icons:Tag(?P\w+)\.\w+' - - res = re.search(pat, icon) - if res: - color = 
res.groupdict().get('color') - if color.lower() in marker_color_map: - return marker_color_map[color.lower()] - - return otio.schema.MarkerColor.RED - - def add_markers(self, hiero_item, otio_item): - for tag in hiero_item.tags(): - if not tag.visible(): - continue - - if tag.name() == 'Copy': - # Hiero adds this tag to a lot of clips - continue - - frame_rate = self.get_rate(hiero_item) - - marked_range = otio.opentime.TimeRange( - start_time=otio.opentime.RationalTime( - tag.inTime(), - frame_rate - ), - duration=otio.opentime.RationalTime( - int(tag.metadata().dict().get('tag.length', '0')), - frame_rate - ) - ) - - metadata = dict( - Hiero=tag.metadata().dict() - ) - # Store the source item for future import assignment - metadata['Hiero']['source_type'] = hiero_item.__class__.__name__ - - marker = otio.schema.Marker( - name=tag.name(), - color=self.get_marker_color(tag), - marked_range=marked_range, - metadata=metadata - ) - - otio_item.markers.append(marker) - - def add_clip(self, trackitem, otio_track, itemindex): - hiero_clip = trackitem.source() - - # Add Gap if needed - if itemindex == 0: - prev_item = trackitem - - else: - prev_item = trackitem.parent().items()[itemindex - 1] - - clip_diff = trackitem.timelineIn() - prev_item.timelineOut() - - if itemindex == 0 and trackitem.timelineIn() > 0: - self.add_gap(trackitem, otio_track, 0) - - elif itemindex and clip_diff != 1: - self.add_gap(trackitem, otio_track, prev_item.timelineOut()) - - # Create Clip - source_range, available_range = self.get_clip_ranges(trackitem) - - otio_clip = otio.schema.Clip( - name=trackitem.name(), - source_range=source_range - ) - - # Add media reference - media_reference = otio.schema.MissingReference() - if hiero_clip.mediaSource().isMediaPresent(): - source = hiero_clip.mediaSource() - first_file = source.fileinfos()[0] - path = first_file.filename() - - if "%" in path: - path = re.sub(r"%\d+d", "%d", path) - if "#" in path: - path = re.sub(r"#+", "%d", path) - - media_reference = otio.schema.ExternalReference( - target_url=u'{}'.format(path), - available_range=available_range - ) - - otio_clip.media_reference = media_reference - - # Add Time Effects - playbackspeed = trackitem.playbackSpeed() - if playbackspeed != 1: - if playbackspeed == 0: - time_effect = otio.schema.FreezeFrame() - - else: - time_effect = otio.schema.LinearTimeWarp( - time_scalar=playbackspeed - ) - otio_clip.effects.append(time_effect) - - # Add tags as markers - if self._preset.properties()["includeTags"]: - self.add_markers(trackitem, otio_clip) - self.add_markers(trackitem.source(), otio_clip) - - otio_track.append(otio_clip) - - # Add Transition if needed - if trackitem.inTransition() or trackitem.outTransition(): - self.add_transition(trackitem, otio_track) - - def add_transition(self, trackitem, otio_track): - transitions = [] - - if trackitem.inTransition(): - if trackitem.inTransition().alignment().name == 'kFadeIn': - transitions.append(trackitem.inTransition()) - - if trackitem.outTransition(): - transitions.append(trackitem.outTransition()) - - for transition in transitions: - alignment = transition.alignment().name - - if alignment == 'kFadeIn': - in_offset_frames = 0 - out_offset_frames = ( - transition.timelineOut() - transition.timelineIn() - ) + 1 - - elif alignment == 'kFadeOut': - in_offset_frames = ( - trackitem.timelineOut() - transition.timelineIn() - ) + 1 - out_offset_frames = 0 - - elif alignment == 'kDissolve': - in_offset_frames = ( - transition.inTrackItem().timelineOut() - - transition.timelineIn() - ) 
- out_offset_frames = ( - transition.timelineOut() - - transition.outTrackItem().timelineIn() - ) - - else: - # kUnknown transition is ignored - continue - - rate = trackitem.source().framerate().toFloat() - in_time = otio.opentime.RationalTime(in_offset_frames, rate) - out_time = otio.opentime.RationalTime(out_offset_frames, rate) - - otio_transition = otio.schema.Transition( - name=alignment, # Consider placing Hiero name in metadata - transition_type=otio.schema.TransitionTypes.SMPTE_Dissolve, - in_offset=in_time, - out_offset=out_time - ) - - if alignment == 'kFadeIn': - otio_track.insert(-1, otio_transition) - - else: - otio_track.append(otio_transition) - - - def add_tracks(self): - for track in self._sequence.items(): - if isinstance(track, hiero.core.AudioTrack): - kind = otio.schema.TrackKind.Audio - - else: - kind = otio.schema.TrackKind.Video - - otio_track = otio.schema.Track(name=track.name(), kind=kind) - - for itemindex, trackitem in enumerate(track): - if isinstance(trackitem.source(), hiero.core.Clip): - self.add_clip(trackitem, otio_track, itemindex) - - self.otio_timeline.tracks.append(otio_track) - - # Add tags as markers - if self._preset.properties()["includeTags"]: - self.add_markers(self._sequence, self.otio_timeline.tracks) - - def create_OTIO(self): - self.otio_timeline = otio.schema.Timeline() - - # Set global start time based on sequence - self.otio_timeline.global_start_time = otio.opentime.RationalTime( - self._sequence.timecodeStart(), - self._sequence.framerate().toFloat() - ) - self.otio_timeline.name = self._sequence.name() - - self.add_tracks() - def startTask(self): - self.create_OTIO() + self.otio_timeline = hiero_export.create_otio_timeline() def taskStep(self): return False @@ -350,7 +40,7 @@ class OTIOExportTask(hiero.core.TaskBase): util.filesystem.makeDirs(dirname) # write otio file - otio.adapters.write_to_file(self.otio_timeline, exportPath) + hiero_export.write_to_file(self.otio_timeline, exportPath) # Catch all exceptions and log error except Exception as e: @@ -370,7 +60,7 @@ class OTIOExportPreset(hiero.core.TaskPresetBase): """Initialise presets to default values""" hiero.core.TaskPresetBase.__init__(self, OTIOExportTask, name) - self.properties()["includeTags"] = True + self.properties()["includeTags"] = hiero_export.include_tags = True self.properties().update(properties) def supportedItems(self): diff --git a/openpype/hosts/hiero/startup/Python/Startup/otioexporter/OTIOExportUI.py b/openpype/hosts/hiero/startup/Python/Startup/otioexporter/OTIOExportUI.py index 887ff05ec8..9b83eefedf 100644 --- a/openpype/hosts/hiero/startup/Python/Startup/otioexporter/OTIOExportUI.py +++ b/openpype/hosts/hiero/startup/Python/Startup/otioexporter/OTIOExportUI.py @@ -1,3 +1,9 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +__author__ = "Daniel Flehner Heen" +__credits__ = ["Jakub Jezek", "Daniel Flehner Heen"] + import hiero.ui import OTIOExportTask @@ -14,6 +20,7 @@ except ImportError: FormLayout = QFormLayout # lint:ok +from openpype.hosts.hiero.otio import hiero_export class OTIOExportUI(hiero.ui.TaskUIBase): def __init__(self, preset): @@ -27,7 +34,7 @@ class OTIOExportUI(hiero.ui.TaskUIBase): def includeMarkersCheckboxChanged(self, state): # Slot to handle change of checkbox state - self._preset.properties()["includeTags"] = state == QtCore.Qt.Checked + hiero_export.include_tags = state == QtCore.Qt.Checked def populateUI(self, widget, exportTemplate): layout = widget.layout() diff --git 
a/openpype/hosts/hiero/startup/Python/Startup/otioexporter/__init__.py b/openpype/hosts/hiero/startup/Python/Startup/otioexporter/__init__.py index 67e6e78d35..3c09655f01 100644 --- a/openpype/hosts/hiero/startup/Python/Startup/otioexporter/__init__.py +++ b/openpype/hosts/hiero/startup/Python/Startup/otioexporter/__init__.py @@ -1,25 +1,3 @@ -# MIT License -# -# Copyright (c) 2018 Daniel Flehner Heen (Storm Studios) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - from OTIOExportTask import OTIOExportTask from OTIOExportUI import OTIOExportUI diff --git a/openpype/hosts/hiero/startup/Python/StartupUI/otioimporter/__init__.py b/openpype/hosts/hiero/startup/Python/StartupUI/otioimporter/__init__.py index 1503a9e9ac..0f0a643909 100644 --- a/openpype/hosts/hiero/startup/Python/StartupUI/otioimporter/__init__.py +++ b/openpype/hosts/hiero/startup/Python/StartupUI/otioimporter/__init__.py @@ -1,42 +1,91 @@ -# MIT License -# -# Copyright (c) 2018 Daniel Flehner Heen (Storm Studios) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
+#!/usr/bin/env python +# -*- coding: utf-8 -*- + +__author__ = "Daniel Flehner Heen" +__credits__ = ["Jakub Jezek", "Daniel Flehner Heen"] import hiero.ui import hiero.core -from otioimporter.OTIOImport import load_otio +import PySide2.QtWidgets as qw + +from openpype.hosts.hiero.otio.hiero_import import load_otio + + +class OTIOProjectSelect(qw.QDialog): + + def __init__(self, projects, *args, **kwargs): + super(OTIOProjectSelect, self).__init__(*args, **kwargs) + self.setWindowTitle('Please select active project') + self.layout = qw.QVBoxLayout() + + self.label = qw.QLabel( + 'Unable to determine which project to import sequence to.\n' + 'Please select one.' + ) + self.layout.addWidget(self.label) + + self.projects = qw.QComboBox() + self.projects.addItems(map(lambda p: p.name(), projects)) + self.layout.addWidget(self.projects) + + QBtn = qw.QDialogButtonBox.Ok | qw.QDialogButtonBox.Cancel + self.buttonBox = qw.QDialogButtonBox(QBtn) + self.buttonBox.accepted.connect(self.accept) + self.buttonBox.rejected.connect(self.reject) + + self.layout.addWidget(self.buttonBox) + self.setLayout(self.layout) + + +def get_sequence(view): + sequence = None + if isinstance(view, hiero.ui.TimelineEditor): + sequence = view.sequence() + + elif isinstance(view, hiero.ui.BinView): + for item in view.selection(): + if not hasattr(item, 'acitveItem'): + continue + + if isinstance(item.activeItem(), hiero.core.Sequence): + sequence = item.activeItem() + + return sequence def OTIO_menu_action(event): - otio_action = hiero.ui.createMenuAction( - 'Import OTIO', + # Menu actions + otio_import_action = hiero.ui.createMenuAction( + 'Import OTIO...', open_otio_file, icon=None ) - hiero.ui.registerAction(otio_action) + + otio_add_track_action = hiero.ui.createMenuAction( + 'New Track(s) from OTIO...', + open_otio_file, + icon=None + ) + otio_add_track_action.setEnabled(False) + + hiero.ui.registerAction(otio_import_action) + hiero.ui.registerAction(otio_add_track_action) + + view = hiero.ui.currentContextMenuView() + + if view: + sequence = get_sequence(view) + if sequence: + otio_add_track_action.setEnabled(True) + for action in event.menu.actions(): if action.text() == 'Import': - action.menu().addAction(otio_action) - break + action.menu().addAction(otio_import_action) + action.menu().addAction(otio_add_track_action) + + elif action.text() == 'New Track': + action.menu().addAction(otio_add_track_action) def open_otio_file(): @@ -45,8 +94,39 @@ def open_otio_file(): pattern='*.otio', requiredExtension='.otio' ) + + selection = None + sequence = None + + view = hiero.ui.currentContextMenuView() + if view: + sequence = get_sequence(view) + selection = view.selection() + + if sequence: + project = sequence.project() + + elif selection: + project = selection[0].project() + + elif len(hiero.core.projects()) > 1: + dialog = OTIOProjectSelect(hiero.core.projects()) + if dialog.exec_(): + project = hiero.core.projects()[dialog.projects.currentIndex()] + + else: + bar = hiero.ui.mainWindow().statusBar() + bar.showMessage( + 'OTIO Import aborted by user', + timeout=3000 + ) + return + + else: + project = hiero.core.projects()[-1] + for otio_file in files: - load_otio(otio_file) + load_otio(otio_file, project, sequence) # HieroPlayer is quite limited and can't create transitions etc. 
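The same importer can also be driven from a script rather than the context menu. A minimal sketch (the .otio path is illustrative, and this assumes Hiero's Python environment with the openpype package importable):

```python
import hiero.core
from openpype.hosts.hiero.otio.hiero_import import load_otio

# Mirror the fallback above: take the most recent project and let the
# importer build a fresh sequence (sequence=None).
project = hiero.core.projects()[-1]
load_otio("/path/to/edit.otio", project, None)
```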
@@ -55,3 +135,7 @@ if not hiero.core.isHieroPlayer(): "kShowContextMenu/kBin", OTIO_menu_action ) + hiero.core.events.registerInterest( + "kShowContextMenu/kTimeline", + OTIO_menu_action + ) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index dd586ca02d..1f0f90811f 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -210,7 +210,7 @@ def validate_fps(): if current_fps != fps: - from ...widgets import popup + from openpype.widgets import popup # Find main window parent = hou.ui.mainQtWindow() @@ -219,8 +219,8 @@ def validate_fps(): else: dialog = popup.Popup2(parent=parent) dialog.setModal(True) - dialog.setWindowTitle("Maya scene not in line with project") - dialog.setMessage("The FPS is out of sync, please fix") + dialog.setWindowTitle("Houdini scene not in line with project") + dialog.setMessage("The FPS is out of sync, please fix it") # Set new text for button (add optional argument for the popup?) toggle = dialog.widgets["toggle"] diff --git a/openpype/hosts/houdini/startup/MainMenuCommon.XML b/openpype/hosts/houdini/startup/MainMenuCommon.xml similarity index 100% rename from openpype/hosts/houdini/startup/MainMenuCommon.XML rename to openpype/hosts/houdini/startup/MainMenuCommon.xml diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index ae2d329a97..a83ff98c99 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -1872,7 +1872,7 @@ def set_context_settings(): # Set project fps fps = asset_data.get("fps", project_data.get("fps", 25)) - api.Session["AVALON_FPS"] = fps + api.Session["AVALON_FPS"] = str(fps) set_scene_fps(fps) # Set project resolution diff --git a/openpype/hosts/maya/plugins/create/create_look.py b/openpype/hosts/maya/plugins/create/create_look.py index 96266aa799..36a3120a9a 100644 --- a/openpype/hosts/maya/plugins/create/create_look.py +++ b/openpype/hosts/maya/plugins/create/create_look.py @@ -12,6 +12,7 @@ class CreateLook(plugin.Creator): family = "look" icon = "paint-brush" defaults = ['Main'] + make_tx = True def __init__(self, *args, **kwargs): super(CreateLook, self).__init__(*args, **kwargs) @@ -19,7 +20,7 @@ class CreateLook(plugin.Creator): self.data["renderlayer"] = lib.get_current_renderlayer() # Whether to automatically convert the textures to .tx upon publish. - self.data["maketx"] = True + self.data["maketx"] = self.make_tx # Enable users to force a copy. 
self.data["forceCopy"] = False diff --git a/openpype/hosts/maya/plugins/create/create_redshift_proxy.py b/openpype/hosts/maya/plugins/create/create_redshift_proxy.py new file mode 100644 index 0000000000..419a8d99d4 --- /dev/null +++ b/openpype/hosts/maya/plugins/create/create_redshift_proxy.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +"""Creator of Redshift proxy subset types.""" + +from openpype.hosts.maya.api import plugin, lib + + +class CreateRedshiftProxy(plugin.Creator): + """Create instance of Redshift Proxy subset.""" + + name = "redshiftproxy" + label = "Redshift Proxy" + family = "redshiftproxy" + icon = "gears" + + def __init__(self, *args, **kwargs): + super(CreateRedshiftProxy, self).__init__(*args, **kwargs) + + animation_data = lib.collect_animation_data() + + self.data["animation"] = False + self.data["proxyFrameStart"] = animation_data["frameStart"] + self.data["proxyFrameEnd"] = animation_data["frameEnd"] + self.data["proxyFrameStep"] = animation_data["step"] diff --git a/openpype/hosts/maya/plugins/load/load_look.py b/openpype/hosts/maya/plugins/load/load_look.py index 4392d1f78d..c39bbc497e 100644 --- a/openpype/hosts/maya/plugins/load/load_look.py +++ b/openpype/hosts/maya/plugins/load/load_look.py @@ -105,7 +105,23 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): # Load relationships shader_relation = api.get_representation_path(json_representation) with open(shader_relation, "r") as f: - relationships = json.load(f) + json_data = json.load(f) + + for rel, data in json_data["relationships"].items(): + # process only non-shading nodes + current_node = "{}:{}".format(container["namespace"], rel) + if current_node in shader_nodes: + continue + print("processing {}".format(rel)) + current_members = set(cmds.ls( + cmds.sets(current_node, query=True) or [], long=True)) + new_members = {"{}".format( + m["name"]) for m in data["members"] or []} + dif = new_members.difference(current_members) + + # add to set + cmds.sets( + dif, forceElement="{}:{}".format(container["namespace"], rel)) # update of reference could result in failed edits - material is not # present because of renaming etc. 
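The membership merge in the look loader above boils down to a set difference: query the existing members, subtract them from the wanted members, and force-assign only what is missing. A standalone sketch of that idea (set and node names are illustrative; runs in a Maya Python session):

```python
import maya.cmds as cmds


def merge_set_members(set_node, wanted_names):
    """Add `wanted_names` to `set_node`, skipping members already present."""
    current = set(cmds.ls(cmds.sets(set_node, query=True) or [], long=True))
    missing = set(wanted_names) - current
    if missing:
        # forceElement also reassigns shading-set membership if needed
        cmds.sets(list(missing), forceElement=set_node)


merge_set_members("lookMain_01_:shadingSet", ["|root|geo|bodyShape"])
```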
@@ -120,7 +136,7 @@ class LookLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
             cmds.file(cr=reference_node)  # cleanReference
 
         # reapply shading groups from json representation on orig nodes
-        openpype.hosts.maya.api.lib.apply_shaders(relationships,
+        openpype.hosts.maya.api.lib.apply_shaders(json_data,
                                                   shader_nodes,
                                                   orig_nodes)
 
@@ -128,12 +144,13 @@
                    "All successful edits were kept intact.\n",
                    "Failed and removed edits:"]
             msg.extend(failed_edits)
+
             msg = ScrollMessageBox(QtWidgets.QMessageBox.Warning,
                                    "Some reference edit failed",
                                    msg)
             msg.exec_()
 
-        attributes = relationships.get("attributes", [])
+        attributes = json_data.get("attributes", [])
 
         # region compute lookup
         nodes_by_id = defaultdict(list)
diff --git a/openpype/hosts/maya/plugins/load/load_redshift_proxy.py b/openpype/hosts/maya/plugins/load/load_redshift_proxy.py
new file mode 100644
index 0000000000..4c6a187bc3
--- /dev/null
+++ b/openpype/hosts/maya/plugins/load/load_redshift_proxy.py
@@ -0,0 +1,146 @@
+# -*- coding: utf-8 -*-
+"""Loader for Redshift proxy."""
+from avalon.maya import lib
+from avalon import api
+from openpype.api import get_project_settings
+import os
+import maya.cmds as cmds
+import clique
+
+
+class RedshiftProxyLoader(api.Loader):
+    """Load Redshift proxy"""
+
+    families = ["redshiftproxy"]
+    representations = ["rs"]
+
+    label = "Import Redshift Proxy"
+    order = -10
+    icon = "code-fork"
+    color = "orange"
+
+    def load(self, context, name=None, namespace=None, options=None):
+        """Plugin entry point."""
+
+        from avalon.maya.pipeline import containerise
+        from openpype.hosts.maya.api.lib import namespaced
+
+        try:
+            family = context["representation"]["context"]["family"]
+        except (KeyError, ValueError):
+            family = "redshiftproxy"
+
+        asset_name = context['asset']["name"]
+        namespace = namespace or lib.unique_namespace(
+            asset_name + "_",
+            prefix="_" if asset_name[0].isdigit() else "",
+            suffix="_",
+        )
+
+        # Ensure Redshift for Maya is loaded.
+ cmds.loadPlugin("redshift4maya", quiet=True) + + with lib.maintained_selection(): + cmds.namespace(addNamespace=namespace) + with namespaced(namespace, new=False): + nodes, group_node = self.create_rs_proxy( + name, self.fname) + + self[:] = nodes + if not nodes: + return + + # colour the group node + settings = get_project_settings(os.environ['AVALON_PROJECT']) + colors = settings['maya']['load']['colors'] + c = colors.get(family) + if c is not None: + cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1) + cmds.setAttr("{0}.outlinerColor".format(group_node), + c[0], c[1], c[2]) + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + def update(self, container, representation): + + node = container['objectName'] + assert cmds.objExists(node), "Missing container" + + members = cmds.sets(node, query=True) or [] + rs_meshes = cmds.ls(members, type="RedshiftProxyMesh") + assert rs_meshes, "Cannot find RedshiftProxyMesh in container" + + filename = api.get_representation_path(representation) + + for rs_mesh in rs_meshes: + cmds.setAttr("{}.fileName".format(rs_mesh), + filename, + type="string") + + # Update metadata + cmds.setAttr("{}.representation".format(node), + str(representation["_id"]), + type="string") + + def remove(self, container): + + # Delete container and its contents + if cmds.objExists(container['objectName']): + members = cmds.sets(container['objectName'], query=True) or [] + cmds.delete([container['objectName']] + members) + + # Remove the namespace, if empty + namespace = container['namespace'] + if cmds.namespace(exists=namespace): + members = cmds.namespaceInfo(namespace, listNamespace=True) + if not members: + cmds.namespace(removeNamespace=namespace) + else: + self.log.warning("Namespace not deleted because it " + "still has members: %s", namespace) + + def switch(self, container, representation): + self.update(container, representation) + + def create_rs_proxy(self, name, path): + """Creates Redshift Proxies showing a proxy object. + + Args: + name (str): Proxy name. + path (str): Path to proxy file. + + Returns: + (str, str): Name of mesh with Redshift proxy and its parent + transform. 
+ + """ + rs_mesh = cmds.createNode( + 'RedshiftProxyMesh', name="{}_RS".format(name)) + mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name)) + + cmds.setAttr("{}.fileName".format(rs_mesh), + path, + type="string") + + cmds.connectAttr("{}.outMesh".format(rs_mesh), + "{}.inMesh".format(mesh_shape)) + + group_node = cmds.group(empty=True, name="{}_GRP".format(name)) + mesh_transform = cmds.listRelatives(mesh_shape, + parent=True, fullPath=True) + cmds.parent(mesh_transform, group_node) + nodes = [rs_mesh, mesh_shape, group_node] + + # determine if we need to enable animation support + files_in_folder = os.listdir(os.path.dirname(path)) + collections, remainder = clique.assemble(files_in_folder) + + if collections: + cmds.setAttr("{}.useFrameExtension".format(rs_mesh), 1) + + return nodes, group_node diff --git a/openpype/hosts/maya/plugins/publish/collect_look.py b/openpype/hosts/maya/plugins/publish/collect_look.py index acc6d8f128..bf24b463ac 100644 --- a/openpype/hosts/maya/plugins/publish/collect_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_look.py @@ -1,8 +1,10 @@ +# -*- coding: utf-8 -*- +"""Maya look collector.""" import re import os import glob -from maya import cmds +from maya import cmds # noqa import pyblish.api from openpype.hosts.maya.api import lib @@ -16,6 +18,11 @@ SHAPE_ATTRS = ["castsShadows", "doubleSided", "opposite"] +RENDERER_NODE_TYPES = [ + # redshift + "RedshiftMeshParameters" +] + SHAPE_ATTRS = set(SHAPE_ATTRS) @@ -29,7 +36,6 @@ def get_look_attrs(node): list: Attribute names to extract """ - # When referenced get only attributes that are "changed since file open" # which includes any reference edits, otherwise take *all* user defined # attributes @@ -219,9 +225,13 @@ class CollectLook(pyblish.api.InstancePlugin): with lib.renderlayer(instance.data["renderlayer"]): self.collect(instance) - def collect(self, instance): + """Collect looks. + Args: + instance: Instance to collect. + + """ self.log.info("Looking for look associations " "for %s" % instance.data['name']) @@ -235,48 +245,91 @@ class CollectLook(pyblish.api.InstancePlugin): self.log.info("Gathering set relations..") # Ensure iteration happen in a list so we can remove keys from the # dict within the loop - for objset in list(sets): - self.log.debug("From %s.." % objset) + + # skipped types of attribute on render specific nodes + disabled_types = ["message", "TdataCompound"] + + for obj_set in list(sets): + self.log.debug("From {}".format(obj_set)) + + # if node is specified as renderer node type, it will be + # serialized with its attributes. + if cmds.nodeType(obj_set) in RENDERER_NODE_TYPES: + self.log.info("- {} is {}".format( + obj_set, cmds.nodeType(obj_set))) + + node_attrs = [] + + # serialize its attributes so they can be recreated on look + # load. 
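The serialization loop below stores plain (name, value, type) triples per attribute. A hand-written example of the result for one renderer set node; the attribute names and values here are made up for illustration:

# Illustrative result of the attribute serialization for one
# RedshiftMeshParameters objectSet (attributes/values are examples only).
node_attrs = [
    ("rsEnableSubdivision", True, "bool"),
    ("rsMaxTessellationSubdivs", 3, "long"),
    ("rsDisplacementScale", 1.0, "float"),
]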
+ for attr in cmds.listAttr(obj_set): + # skip publishedNodeInfo attributes as they break + # getAttr() and we don't need them anyway + if attr.startswith("publishedNodeInfo"): + continue + + # skip attributes types defined in 'disabled_type' list + if cmds.getAttr("{}.{}".format(obj_set, attr), type=True) in disabled_types: # noqa + continue + + node_attrs.append(( + attr, + cmds.getAttr("{}.{}".format(obj_set, attr)), + cmds.getAttr( + "{}.{}".format(obj_set, attr), type=True) + )) + + for member in cmds.ls( + cmds.sets(obj_set, query=True), long=True): + member_data = self.collect_member_data(member, + instance_lookup) + if not member_data: + continue + + # Add information of the node to the members list + sets[obj_set]["members"].append(member_data) # Get all nodes of the current objectSet (shadingEngine) - for member in cmds.ls(cmds.sets(objset, query=True), long=True): + for member in cmds.ls(cmds.sets(obj_set, query=True), long=True): member_data = self.collect_member_data(member, instance_lookup) if not member_data: continue # Add information of the node to the members list - sets[objset]["members"].append(member_data) + sets[obj_set]["members"].append(member_data) # Remove sets that didn't have any members assigned in the end # Thus the data will be limited to only what we need. - self.log.info("objset {}".format(sets[objset])) - if not sets[objset]["members"] or (not objset.endswith("SG")): - self.log.info("Removing redundant set information: " - "%s" % objset) - sets.pop(objset, None) + self.log.info("obj_set {}".format(sets[obj_set])) + if not sets[obj_set]["members"]: + self.log.info( + "Removing redundant set information: {}".format(obj_set)) + sets.pop(obj_set, None) self.log.info("Gathering attribute changes to instance members..") attributes = self.collect_attributes_changed(instance) # Store data on the instance - instance.data["lookData"] = {"attributes": attributes, - "relationships": sets} + instance.data["lookData"] = { + "attributes": attributes, + "relationships": sets + } # Collect file nodes used by shading engines (if we have any) - files = list() - looksets = sets.keys() - shaderAttrs = [ - "surfaceShader", - "volumeShader", - "displacementShader", - "aiSurfaceShader", - "aiVolumeShader"] - materials = list() + files = [] + look_sets = sets.keys() + shader_attrs = [ + "surfaceShader", + "volumeShader", + "displacementShader", + "aiSurfaceShader", + "aiVolumeShader"] + if look_sets: + materials = [] - if looksets: - for look in looksets: - for at in shaderAttrs: + for look in look_sets: + for at in shader_attrs: try: con = cmds.listConnections("{}.{}".format(look, at)) except ValueError: @@ -289,12 +342,19 @@ class CollectLook(pyblish.api.InstancePlugin): self.log.info("Found materials:\n{}".format(materials)) - self.log.info("Found the following sets:\n{}".format(looksets)) + self.log.info("Found the following sets:\n{}".format(look_sets)) # Get the entire node chain of the look sets - # history = cmds.listHistory(looksets) - history = list() + # history = cmds.listHistory(look_sets) + history = [] for material in materials: history.extend(cmds.listHistory(material)) + + # handle VrayPluginNodeMtl node - see #1397 + vray_plugin_nodes = cmds.ls( + history, type="VRayPluginNodeMtl", long=True) + for vray_node in vray_plugin_nodes: + history.extend(cmds.listHistory(vray_node)) + files = cmds.ls(history, type="file", long=True) files.extend(cmds.ls(history, type="aiImage", long=True)) @@ -313,7 +373,7 @@ class CollectLook(pyblish.api.InstancePlugin): # Ensure unique 
shader sets # Add shader sets to the instance for unify ID validation - instance.extend(shader for shader in looksets if shader + instance.extend(shader for shader in look_sets if shader not in instance_lookup) self.log.info("Collected look for %s" % instance) @@ -331,7 +391,7 @@ class CollectLook(pyblish.api.InstancePlugin): dict """ - sets = dict() + sets = {} for node in instance: related_sets = lib.get_related_sets(node) if not related_sets: @@ -427,6 +487,11 @@ class CollectLook(pyblish.api.InstancePlugin): """ self.log.debug("processing: {}".format(node)) + if cmds.nodeType(node) not in ["file", "aiImage"]: + self.log.error( + "Unsupported file node: {}".format(cmds.nodeType(node))) + raise AssertionError("Unsupported file node") + if cmds.nodeType(node) == 'file': self.log.debug(" - file node") attribute = "{}.fileTextureName".format(node) @@ -435,6 +500,7 @@ class CollectLook(pyblish.api.InstancePlugin): self.log.debug("aiImage node") attribute = "{}.filename".format(node) computed_attribute = attribute + source = cmds.getAttr(attribute) self.log.info(" - file source: {}".format(source)) color_space_attr = "{}.colorSpace".format(node) diff --git a/openpype/hosts/maya/plugins/publish/collect_render.py b/openpype/hosts/maya/plugins/publish/collect_render.py index 75749a952e..647a46e240 100644 --- a/openpype/hosts/maya/plugins/publish/collect_render.py +++ b/openpype/hosts/maya/plugins/publish/collect_render.py @@ -358,9 +358,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin): options["extendFrames"] = extend_frames options["overrideExistingFrame"] = override_frames - maya_render_plugin = "MayaPype" - if attributes.get("useMayaBatch", True): - maya_render_plugin = "MayaBatch" + maya_render_plugin = "MayaBatch" options["mayaRenderPlugin"] = maya_render_plugin diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 79488a372c..bdd061578e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -1,13 +1,14 @@ +# -*- coding: utf-8 -*- +"""Maya look extractor.""" import os import sys import json -import copy import tempfile import contextlib import subprocess from collections import OrderedDict -from maya import cmds +from maya import cmds # noqa import pyblish.api import avalon.maya @@ -22,23 +23,38 @@ HARDLINK = 2 def find_paths_by_hash(texture_hash): - # Find the texture hash key in the dictionary and all paths that - # originate from it. + """Find the texture hash key in the dictionary. + + All paths that originate from it. + + Args: + texture_hash (str): Hash of the texture. + + Return: + str: path to texture if found. + + """ key = "data.sourceHashes.{0}".format(texture_hash) return io.distinct(key, {"type": "version"}) def maketx(source, destination, *args): - """Make .tx using maketx with some default settings. + """Make `.tx` using `maketx` with some default settings. + The settings are based on default as used in Arnold's txManager in the scene. This function requires the `maketx` executable to be on the `PATH`. + Args: source (str): Path to source file. destination (str): Writing destination path. - """ + *args: Additional arguments for `maketx`. + Returns: + str: Output of `maketx` command. 
+ + """ cmd = [ "maketx", "-v", # verbose @@ -56,7 +72,7 @@ def maketx(source, destination, *args): cmd = " ".join(cmd) - CREATE_NO_WINDOW = 0x08000000 + CREATE_NO_WINDOW = 0x08000000 # noqa kwargs = dict(args=cmd, stderr=subprocess.STDOUT) if sys.platform == "win32": @@ -118,12 +134,58 @@ class ExtractLook(openpype.api.Extractor): hosts = ["maya"] families = ["look"] order = pyblish.api.ExtractorOrder + 0.2 + scene_type = "ma" + + @staticmethod + def get_renderer_name(): + """Get renderer name from Maya. + + Returns: + str: Renderer name. + + """ + renderer = cmds.getAttr( + "defaultRenderGlobals.currentRenderer" + ).lower() + # handle various renderman names + if renderer.startswith("renderman"): + renderer = "renderman" + return renderer + + def get_maya_scene_type(self, instance): + """Get Maya scene type from settings. + + Args: + instance (pyblish.api.Instance): Instance with collected + project settings. + + """ + ext_mapping = ( + instance.context.data["project_settings"]["maya"]["ext_mapping"] + ) + if ext_mapping: + self.log.info("Looking in settings for scene type ...") + # use extension mapping for first family found + for family in self.families: + try: + self.scene_type = ext_mapping[family] + self.log.info( + "Using {} as scene type".format(self.scene_type)) + break + except KeyError: + # no preset found + pass def process(self, instance): + """Plugin entry point. + Args: + instance: Instance to process. + + """ # Define extract output file path dir_path = self.staging_dir(instance) - maya_fname = "{0}.ma".format(instance.name) + maya_fname = "{0}.{1}".format(instance.name, self.scene_type) json_fname = "{0}.json".format(instance.name) # Make texture dump folder @@ -148,7 +210,7 @@ class ExtractLook(openpype.api.Extractor): # Collect all unique files used in the resources files = set() - files_metadata = dict() + files_metadata = {} for resource in resources: # Preserve color space values (force value after filepath change) # This will also trigger in the same order at end of context to @@ -162,35 +224,33 @@ class ExtractLook(openpype.api.Extractor): # files.update(os.path.normpath(f)) # Process the resource files - transfers = list() - hardlinks = list() - hashes = dict() - forceCopy = instance.data.get("forceCopy", False) + transfers = [] + hardlinks = [] + hashes = {} + force_copy = instance.data.get("forceCopy", False) self.log.info(files) for filepath in files_metadata: - cspace = files_metadata[filepath]["color_space"] - linearise = False - if do_maketx: - if cspace == "sRGB": - linearise = True - # set its file node to 'raw' as tx will be linearized - files_metadata[filepath]["color_space"] = "raw" + linearize = False + if do_maketx and files_metadata[filepath]["color_space"] == "sRGB": # noqa: E501 + linearize = True + # set its file node to 'raw' as tx will be linearized + files_metadata[filepath]["color_space"] = "raw" - source, mode, hash = self._process_texture( + source, mode, texture_hash = self._process_texture( filepath, do_maketx, staging=dir_path, - linearise=linearise, - force=forceCopy + linearize=linearize, + force=force_copy ) destination = self.resource_destination(instance, source, do_maketx) # Force copy is specified. 
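The transfer bookkeeping above distinguishes copied from hardlinked textures. Reduced to its core, the per-texture decision looks like this; a sketch only, with COPY = 1 assumed as the counterpart of the HARDLINK = 2 constant defined earlier in this file:

# Sketch of the copy-vs-hardlink choice made for each texture above.
COPY = 1       # assumed counterpart of the module constant
HARDLINK = 2   # defined earlier in extract_look.py

def pick_transfer_mode(already_published, force_copy):
    # forceCopy always duplicates; hardlinking is only safe when an
    # identical texture (same hash) was already published
    if force_copy or not already_published:
        return COPY
    return HARDLINK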
- if forceCopy: + if force_copy: mode = COPY if mode == COPY: @@ -202,10 +262,10 @@ class ExtractLook(openpype.api.Extractor): # Store the hashes from hash to destination to include in the # database - hashes[hash] = destination + hashes[texture_hash] = destination # Remap the resources to the destination path (change node attributes) - destinations = dict() + destinations = {} remap = OrderedDict() # needs to be ordered, see color space values for resource in resources: source = os.path.normpath(resource["source"]) @@ -222,7 +282,7 @@ class ExtractLook(openpype.api.Extractor): color_space_attr = resource["node"] + ".colorSpace" color_space = cmds.getAttr(color_space_attr) if files_metadata[source]["color_space"] == "raw": - # set colorpsace to raw if we linearized it + # set color space to raw if we linearized it color_space = "Raw" # Remap file node filename to destination attr = resource["attribute"] @@ -267,11 +327,11 @@ class ExtractLook(openpype.api.Extractor): json.dump(data, f) if "files" not in instance.data: - instance.data["files"] = list() + instance.data["files"] = [] if "hardlinks" not in instance.data: - instance.data["hardlinks"] = list() + instance.data["hardlinks"] = [] if "transfers" not in instance.data: - instance.data["transfers"] = list() + instance.data["transfers"] = [] instance.data["files"].append(maya_fname) instance.data["files"].append(json_fname) @@ -311,14 +371,26 @@ class ExtractLook(openpype.api.Extractor): maya_path)) def resource_destination(self, instance, filepath, do_maketx): - anatomy = instance.context.data["anatomy"] + """Get resource destination path. + This is utility function to change path if resource file name is + changed by some external tool like `maketx`. + + Args: + instance: Current Instance. + filepath (str): Resource path + do_maketx (bool): Flag if resource is processed by `maketx`. + + Returns: + str: Path to resource file + + """ resources_dir = instance.data["resourcesDir"] # Compute destination location basename, ext = os.path.splitext(os.path.basename(filepath)) - # If maketx then the texture will always end with .tx + # If `maketx` then the texture will always end with .tx if do_maketx: ext = ".tx" @@ -326,7 +398,7 @@ class ExtractLook(openpype.api.Extractor): resources_dir, basename + ext ) - def _process_texture(self, filepath, do_maketx, staging, linearise, force): + def _process_texture(self, filepath, do_maketx, staging, linearize, force): """Process a single texture file on disk for publishing. This will: 1. 
Check whether it's already published, if so it will do hardlink
@@ -363,7 +435,7 @@
 
         # Produce .tx file in staging if source file is not .tx
         converted = os.path.join(staging, "resources", fname + ".tx")
-        if linearise:
+        if linearize:
             self.log.info("tx: converting sRGB -> linear")
             colorconvert = "--colorconvert sRGB linear"
         else:
diff --git a/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py b/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py
new file mode 100644
index 0000000000..7c9e201986
--- /dev/null
+++ b/openpype/hosts/maya/plugins/publish/extract_redshift_proxy.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+"""Redshift Proxy extractor."""
+import os
+
+import avalon.maya
+import openpype.api
+
+from maya import cmds
+
+
+class ExtractRedshiftProxy(openpype.api.Extractor):
+    """Extract the content of the instance to a redshift proxy file."""
+
+    label = "Redshift Proxy (.rs)"
+    hosts = ["maya"]
+    families = ["redshiftproxy"]
+
+    def process(self, instance):
+        """Extractor entry point."""
+
+        staging_dir = self.staging_dir(instance)
+        file_name = "{}.rs".format(instance.name)
+        file_path = os.path.join(staging_dir, file_name)
+
+        anim_on = instance.data["animation"]
+        rs_options = "exportConnectivity=0;enableCompression=1;keepUnused=0;"
+        repr_files = file_name
+
+        if not anim_on:
+            # Remove animation information because it is not required for
+            # non-animated subsets
+            instance.data.pop("proxyFrameStart", None)
+            instance.data.pop("proxyFrameEnd", None)
+
+        else:
+            start_frame = instance.data["proxyFrameStart"]
+            end_frame = instance.data["proxyFrameEnd"]
+            rs_options = "{}startFrame={};endFrame={};frameStep={};".format(
+                rs_options, start_frame,
+                end_frame, instance.data["proxyFrameStep"]
+            )
+
+            root, ext = os.path.splitext(file_path)
+            # Frame numbers are padded to four digits below, matching the
+            # padding Redshift uses when writing proxy sequences.
+            repr_files = [
+                "{}.{}{}".format(root, str(frame).rjust(4, "0"), ext)  # noqa: E501
+                for frame in range(
+                    int(start_frame),
+                    int(end_frame) + 1,
+                    int(instance.data["proxyFrameStep"]),
+                )]
+        # vertex_colors = instance.data.get("vertexColors", False)
+
+        # Write out rs file
+        self.log.info("Writing: '%s'" % file_path)
+        with avalon.maya.maintained_selection():
+            cmds.select(instance.data["setMembers"], noExpand=True)
+            cmds.file(file_path,
+                      pr=False,
+                      force=True,
+                      type="Redshift Proxy",
+                      exportSelected=True,
+                      options=rs_options)
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        self.log.debug("Files: {}".format(repr_files))
+
+        representation = {
+            'name': 'rs',
+            'ext': 'rs',
+            'files': repr_files,
+            "stagingDir": staging_dir,
+        }
+        if anim_on:
+            representation["frameStart"] = instance.data["proxyFrameStart"]
+        instance.data["representations"].append(representation)
+
+        self.log.info("Extracted instance '%s' to: %s"
+                      % (instance.name, staging_dir))
diff --git a/openpype/hosts/maya/plugins/publish/extract_vrayscene.py b/openpype/hosts/maya/plugins/publish/extract_vrayscene.py
index d3a3df6b1c..c9edfc8343 100644
--- a/openpype/hosts/maya/plugins/publish/extract_vrayscene.py
+++ b/openpype/hosts/maya/plugins/publish/extract_vrayscene.py
@@ -5,7 +5,7 @@ import re
 import avalon.maya
 import openpype.api
 
-from openpype.hosts.maya.render_setup_tools import export_in_rs_layer
+from openpype.hosts.maya.api.render_setup_tools import export_in_rs_layer
 
 from maya import cmds
diff --git a/openpype/hosts/maya/plugins/publish/validate_look_sets.py b/openpype/hosts/maya/plugins/publish/validate_look_sets.py
index 48431d0906..5e737ca876 100644
--- a/openpype/hosts/maya/plugins/publish/validate_look_sets.py
+++ b/openpype/hosts/maya/plugins/publish/validate_look_sets.py
@@ -73,8 +73,10 @@ class ValidateLookSets(pyblish.api.InstancePlugin):
         # check if any objectSets are not present in the relationships
         missing_sets = [s for s in sets if s not in relationships]
         if missing_sets:
-            for set in missing_sets:
-                if '_SET' not in set:
+            for missing_set in missing_sets:
+                cls.log.debug(missing_set)
+
+                if '_SET' not in missing_set:
                     # A set of this node is not coming along, this is wrong!
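To make the sequence handling in extract_redshift_proxy.py above concrete, the padded-name comprehension expands like this for a short animated export:

# Worked example of the repr_files comprehension above:
# file_path "proxy.rs", frames 1001-1003, step 1.
root, ext = "proxy", ".rs"
repr_files = [
    "{}.{}{}".format(root, str(frame).rjust(4, "0"), ext)
    for frame in range(1001, 1003 + 1, 1)
]
assert repr_files == ["proxy.1001.rs", "proxy.1002.rs", "proxy.1003.rs"]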
cls.log.error("Missing sets '{}' for node " "'{}'".format(missing_sets, node)) @@ -82,8 +84,8 @@ class ValidateLookSets(pyblish.api.InstancePlugin): continue # Ensure the node is in the sets that are collected - for shaderset, data in relationships.items(): - if shaderset not in sets: + for shader_set, data in relationships.items(): + if shader_set not in sets: # no need to check for a set if the node # isn't in it anyway continue @@ -94,7 +96,7 @@ class ValidateLookSets(pyblish.api.InstancePlugin): # The node is not found in the collected set # relationships cls.log.error("Missing '{}' in collected set node " - "'{}'".format(node, shaderset)) + "'{}'".format(node, shader_set)) invalid.append(node) continue diff --git a/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py b/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py index 1c6aa3078e..b2ef174374 100644 --- a/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py +++ b/openpype/hosts/maya/plugins/publish/validate_unreal_mesh_triangulated.py @@ -8,7 +8,7 @@ import openpype.api class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin): """Validate if mesh is made of triangles for Unreal Engine""" - order = openpype.api.ValidateMeshOder + order = openpype.api.ValidateMeshOrder hosts = ["maya"] families = ["unrealStaticMesh"] category = "geometry" diff --git a/openpype/hosts/maya/startup/userSetup.py b/openpype/hosts/maya/startup/userSetup.py index d556a89fa3..6d27c66882 100644 --- a/openpype/hosts/maya/startup/userSetup.py +++ b/openpype/hosts/maya/startup/userSetup.py @@ -10,7 +10,6 @@ print("starting OpenPype usersetup") settings = get_project_settings(os.environ['AVALON_PROJECT']) shelf_preset = settings['maya'].get('project_shelf') - if shelf_preset: project = os.environ["AVALON_PROJECT"] @@ -23,7 +22,7 @@ if shelf_preset: print(import_string) exec(import_string) -cmds.evalDeferred("mlib.shelf(name=shelf_preset['name'], iconPath=icon_path, preset=shelf_preset)") + cmds.evalDeferred("mlib.shelf(name=shelf_preset['name'], iconPath=icon_path, preset=shelf_preset)") print("finished OpenPype usersetup") diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index 25504aa12b..bd7a95f916 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -1,18 +1,12 @@ import os -import sys import nuke -from avalon import api as avalon -from openpype.tools import workfiles -from pyblish import api as pyblish -from openpype.api import Logger -import openpype.hosts.nuke +import avalon.api +import pyblish.api +import openpype from . 
import lib, menu - -self = sys.modules[__name__] -self.workfiles_launched = False -log = Logger().get_logger(__name__) +log = openpype.api.Logger().get_logger(__name__) AVALON_CONFIG = os.getenv("AVALON_CONFIG", "pype") HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.nuke.__file__)) @@ -25,7 +19,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") # registering pyblish gui regarding settings in presets if os.getenv("PYBLISH_GUI", None): - pyblish.register_gui(os.getenv("PYBLISH_GUI", None)) + pyblish.api.register_gui(os.getenv("PYBLISH_GUI", None)) def reload_config(): @@ -61,15 +55,16 @@ def install(): ''' log.info("Registering Nuke plug-ins..") - pyblish.register_plugin_path(PUBLISH_PATH) - avalon.register_plugin_path(avalon.Loader, LOAD_PATH) - avalon.register_plugin_path(avalon.Creator, CREATE_PATH) - avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH) + pyblish.api.register_plugin_path(PUBLISH_PATH) + avalon.api.register_plugin_path(avalon.api.Loader, LOAD_PATH) + avalon.api.register_plugin_path(avalon.api.Creator, CREATE_PATH) + avalon.api.register_plugin_path(avalon.api.InventoryAction, INVENTORY_PATH) # Register Avalon event for workfiles loading. - avalon.on("workio.open_file", lib.check_inventory_versions) + avalon.api.on("workio.open_file", lib.check_inventory_versions) - pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled) + pyblish.api.register_callback( + "instanceToggled", on_pyblish_instance_toggled) workfile_settings = lib.WorkfileSettings() # Disable all families except for the ones we explicitly want to see family_states = [ @@ -79,39 +74,27 @@ def install(): "gizmo" ] - avalon.data["familiesStateDefault"] = False - avalon.data["familiesStateToggled"] = family_states - - # Workfiles. - launch_workfiles = os.environ.get("WORKFILES_STARTUP") - - if launch_workfiles: - nuke.addOnCreate(launch_workfiles_app, nodeClass="Root") + avalon.api.data["familiesStateDefault"] = False + avalon.api.data["familiesStateToggled"] = family_states # Set context settings. 
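Several of the Root callbacks registered just below are meant to fire only once per session; the pattern used by lib.open_last_workfile() further down is to unregister inside the callback. A minimal sketch of that pattern, assuming a running Nuke session:

import nuke

def _run_once_on_root():
    # unregister first, so creating another Root cannot re-trigger it
    nuke.removeOnCreate(_run_once_on_root, nodeClass="Root")
    nuke.tprint("first Root node created")

nuke.addOnCreate(_run_once_on_root, nodeClass="Root")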
nuke.addOnCreate(workfile_settings.set_context_settings, nodeClass="Root") - # nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root") - + nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root") + nuke.addOnCreate(lib.open_last_workfile, nodeClass="Root") + nuke.addOnCreate(lib.launch_workfiles_app, nodeClass="Root") menu.install() -def launch_workfiles_app(): - '''Function letting start workfiles after start of host - ''' - if not self.workfiles_launched: - self.workfiles_launched = True - workfiles.show(os.environ["AVALON_WORKDIR"]) - - def uninstall(): '''Uninstalling host's integration ''' log.info("Deregistering Nuke plug-ins..") - pyblish.deregister_plugin_path(PUBLISH_PATH) - avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH) - avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH) + pyblish.api.deregister_plugin_path(PUBLISH_PATH) + avalon.api.deregister_plugin_path(avalon.api.Loader, LOAD_PATH) + avalon.api.deregister_plugin_path(avalon.api.Creator, CREATE_PATH) - pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled) + pyblish.api.deregister_callback( + "instanceToggled", on_pyblish_instance_toggled) reload_config() menu.uninstall() diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 8618b03cdc..7ef5401292 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -1,11 +1,18 @@ import os import re import sys +import six +import platform from collections import OrderedDict + from avalon import api, io, lib +from avalon.tools import workfiles import avalon.nuke from avalon.nuke import lib as anlib +from avalon.nuke import ( + save_file, open_file +) from openpype.api import ( Logger, Anatomy, @@ -13,7 +20,7 @@ from openpype.api import ( get_anatomy_settings, get_hierarchy, get_asset, - config, + get_current_project_settings, ApplicationManager ) @@ -23,34 +30,34 @@ from .utils import set_context_favorites log = Logger().get_logger(__name__) -self = sys.modules[__name__] -self._project = None -self._node_tab_name = "{}".format(os.getenv("AVALON_LABEL") or "Avalon") +opnl = sys.modules[__name__] +opnl._project = None +opnl.project_name = os.getenv("AVALON_PROJECT") +opnl.workfiles_launched = False +opnl._node_tab_name = "{}".format(os.getenv("AVALON_LABEL") or "Avalon") -def get_node_imageio_setting(**kwarg): + +def get_created_node_imageio_setting(**kwarg): ''' Get preset data for dataflow (fileType, compression, bitDepth) ''' - log.info(kwarg) - host = str(kwarg.get("host", "nuke")) + log.debug(kwarg) nodeclass = kwarg.get("nodeclass", None) creator = kwarg.get("creator", None) - project_name = os.getenv("AVALON_PROJECT") - assert any([host, nodeclass]), nuke.message( + assert any([creator, nodeclass]), nuke.message( "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__)) - imageio_nodes = (get_anatomy_settings(project_name) - ["imageio"] - .get(host, None) - ["nodes"] - ["requiredNodes"] - ) + imageio = get_anatomy_settings(opnl.project_name)["imageio"] + imageio_nodes = imageio["nuke"]["nodes"]["requiredNodes"] + imageio_node = None for node in imageio_nodes: log.info(node) - if node["nukeNodeClass"] == nodeclass: - if creator in node["plugins"]: - imageio_node = node + if (node["nukeNodeClass"] != nodeclass) and ( + creator not in node["plugins"]): + continue + + imageio_node = node log.info("ImageIO node: {}".format(imageio_node)) return imageio_node @@ -59,12 +66,9 @@ def get_node_imageio_setting(**kwarg): def get_imageio_input_colorspace(filename): ''' Get 
input file colorspace based on regex in settings. ''' - imageio_regex_inputs = (get_anatomy_settings(os.getenv("AVALON_PROJECT")) - ["imageio"] - ["nuke"] - ["regexInputs"] - ["inputs"] - ) + imageio_regex_inputs = ( + get_anatomy_settings(opnl.project_name) + ["imageio"]["nuke"]["regexInputs"]["inputs"]) preset_clrsp = None for regexInput in imageio_regex_inputs: @@ -96,40 +100,39 @@ def check_inventory_versions(): """ # get all Loader nodes by avalon attribute metadata for each in nuke.allNodes(): - if each.Class() == 'Read': - container = avalon.nuke.parse_container(each) + container = avalon.nuke.parse_container(each) - if container: - node = nuke.toNode(container["objectName"]) - avalon_knob_data = avalon.nuke.read( - node) + if container: + node = nuke.toNode(container["objectName"]) + avalon_knob_data = avalon.nuke.read( + node) - # get representation from io - representation = io.find_one({ - "type": "representation", - "_id": io.ObjectId(avalon_knob_data["representation"]) - }) + # get representation from io + representation = io.find_one({ + "type": "representation", + "_id": io.ObjectId(avalon_knob_data["representation"]) + }) - # Get start frame from version data - version = io.find_one({ - "type": "version", - "_id": representation["parent"] - }) + # Get start frame from version data + version = io.find_one({ + "type": "version", + "_id": representation["parent"] + }) - # get all versions in list - versions = io.find({ - "type": "version", - "parent": version["parent"] - }).distinct('name') + # get all versions in list + versions = io.find({ + "type": "version", + "parent": version["parent"] + }).distinct('name') - max_version = max(versions) + max_version = max(versions) - # check the available version and do match - # change color of node if not max verion - if version.get("name") not in [max_version]: - node["tile_color"].setValue(int("0xd84f20ff", 16)) - else: - node["tile_color"].setValue(int("0x4ecd25ff", 16)) + # check the available version and do match + # change color of node if not max verion + if version.get("name") not in [max_version]: + node["tile_color"].setValue(int("0xd84f20ff", 16)) + else: + node["tile_color"].setValue(int("0x4ecd25ff", 16)) def writes_version_sync(): @@ -145,34 +148,33 @@ def writes_version_sync(): except Exception: return - for each in nuke.allNodes(): - if each.Class() == 'Write': - # check if the node is avalon tracked - if self._node_tab_name not in each.knobs(): + for each in nuke.allNodes(filter="Write"): + # check if the node is avalon tracked + if opnl._node_tab_name not in each.knobs(): + continue + + avalon_knob_data = avalon.nuke.read( + each) + + try: + if avalon_knob_data['families'] not in ["render"]: + log.debug(avalon_knob_data['families']) continue - avalon_knob_data = avalon.nuke.read( - each) + node_file = each['file'].value() - try: - if avalon_knob_data['families'] not in ["render"]: - log.debug(avalon_knob_data['families']) - continue + node_version = "v" + get_version_from_path(node_file) + log.debug("node_version: {}".format(node_version)) - node_file = each['file'].value() - - node_version = "v" + get_version_from_path(node_file) - log.debug("node_version: {}".format(node_version)) - - node_new_file = node_file.replace(node_version, new_version) - each['file'].setValue(node_new_file) - if not os.path.isdir(os.path.dirname(node_new_file)): - log.warning("Path does not exist! 
I am creating it.") - os.makedirs(os.path.dirname(node_new_file)) - except Exception as e: - log.warning( - "Write node: `{}` has no version in path: {}".format( - each.name(), e)) + node_new_file = node_file.replace(node_version, new_version) + each['file'].setValue(node_new_file) + if not os.path.isdir(os.path.dirname(node_new_file)): + log.warning("Path does not exist! I am creating it.") + os.makedirs(os.path.dirname(node_new_file)) + except Exception as e: + log.warning( + "Write node: `{}` has no version in path: {}".format( + each.name(), e)) def version_up_script(): @@ -193,24 +195,22 @@ def check_subsetname_exists(nodes, subset_name): Returns: bool: True of False """ - result = next((True for n in nodes - if subset_name in avalon.nuke.read(n).get("subset", "")), False) - return result + return next((True for n in nodes + if subset_name in avalon.nuke.read(n).get("subset", "")), + False) def get_render_path(node): ''' Generate Render path from presets regarding avalon knob data ''' - data = dict() - data['avalon'] = avalon.nuke.read( - node) - + data = {'avalon': avalon.nuke.read(node)} data_preset = { - "class": data['avalon']['family'], - "preset": data['avalon']['families'] + "nodeclass": data['avalon']['family'], + "families": [data['avalon']['families']], + "creator": data['avalon']['creator'] } - nuke_imageio_writes = get_node_imageio_setting(**data_preset) + nuke_imageio_writes = get_created_node_imageio_setting(**data_preset) application = lib.get_application(os.environ["AVALON_APP_NAME"]) data.update({ @@ -316,7 +316,7 @@ def create_write_node(name, data, input=None, prenodes=None, review=True): node (obj): group node with avalon data as Knobs ''' - imageio_writes = get_node_imageio_setting(**data) + imageio_writes = get_created_node_imageio_setting(**data) app_manager = ApplicationManager() app_name = os.environ.get("AVALON_APP_NAME") if app_name: @@ -359,8 +359,7 @@ def create_write_node(name, data, input=None, prenodes=None, review=True): # adding dataflow template log.debug("imageio_writes: `{}`".format(imageio_writes)) for knob in imageio_writes["knobs"]: - if knob["name"] not in ["_id", "_previous"]: - _data.update({knob["name"]: knob["value"]}) + _data.update({knob["name"]: knob["value"]}) _data = anlib.fix_data_for_node_create(_data) @@ -382,16 +381,19 @@ def create_write_node(name, data, input=None, prenodes=None, review=True): "inputName": input.name()}) prev_node = nuke.createNode( "Input", "name {}".format(input.name())) + prev_node.hideControlPanel() else: # generic input node connected to nothing prev_node = nuke.createNode( "Input", "name {}".format("rgba")) + prev_node.hideControlPanel() # creating pre-write nodes `prenodes` if prenodes: for name, klass, properties, set_output_to in prenodes: # create node now_node = nuke.createNode(klass, "name {}".format(name)) + now_node.hideControlPanel() # add data to knob for k, v in properties: @@ -413,17 +415,21 @@ def create_write_node(name, data, input=None, prenodes=None, review=True): for i, node_name in enumerate(set_output_to): input_node = nuke.createNode( "Input", "name {}".format(node_name)) + input_node.hideControlPanel() connections.append({ "node": nuke.toNode(node_name), "inputName": node_name}) now_node.setInput(1, input_node) + elif isinstance(set_output_to, str): input_node = nuke.createNode( "Input", "name {}".format(node_name)) + input_node.hideControlPanel() connections.append({ "node": nuke.toNode(set_output_to), "inputName": set_output_to}) now_node.setInput(0, input_node) + else: 
now_node.setInput(0, prev_node) @@ -435,7 +441,7 @@ def create_write_node(name, data, input=None, prenodes=None, review=True): "inside_{}".format(name), **_data ) - + write_node.hideControlPanel() # connect to previous node now_node.setInput(0, prev_node) @@ -443,6 +449,7 @@ def create_write_node(name, data, input=None, prenodes=None, review=True): prev_node = now_node now_node = nuke.createNode("Output", "name Output1") + now_node.hideControlPanel() # connect to previous node now_node.setInput(0, prev_node) @@ -490,7 +497,7 @@ def create_write_node(name, data, input=None, prenodes=None, review=True): add_deadline_tab(GN) # open the our Tab as default - GN[self._node_tab_name].setFlag(0) + GN[opnl._node_tab_name].setFlag(0) # set tile color tile_color = _data.get("tile_color", "0xff0000ff") @@ -613,7 +620,7 @@ class WorkfileSettings(object): root_node=None, nodes=None, **kwargs): - self._project = kwargs.get( + opnl._project = kwargs.get( "project") or io.find_one({"type": "project"}) self._asset = kwargs.get("asset_name") or api.Session["AVALON_ASSET"] self._asset_entity = get_asset(self._asset) @@ -656,8 +663,7 @@ class WorkfileSettings(object): ] erased_viewers = [] - for v in [n for n in self._nodes - if "Viewer" in n.Class()]: + for v in nuke.allNodes(filter="Viewer"): v['viewerProcess'].setValue(str(viewer_dict["viewerProcess"])) if str(viewer_dict["viewerProcess"]) \ not in v['viewerProcess'].value(): @@ -701,7 +707,7 @@ class WorkfileSettings(object): log.error(msg) nuke.message(msg) - log.debug(">> root_dict: {}".format(root_dict)) + log.warning(">> root_dict: {}".format(root_dict)) # first set OCIO if self._root_node["colorManagement"].value() \ @@ -723,41 +729,41 @@ class WorkfileSettings(object): # third set ocio custom path if root_dict.get("customOCIOConfigPath"): - self._root_node["customOCIOConfigPath"].setValue( - str(root_dict["customOCIOConfigPath"]).format( - **os.environ - ).replace("\\", "/") - ) - log.debug("nuke.root()['{}'] changed to: {}".format( - "customOCIOConfigPath", root_dict["customOCIOConfigPath"])) - root_dict.pop("customOCIOConfigPath") + unresolved_path = root_dict["customOCIOConfigPath"] + ocio_paths = unresolved_path[platform.system().lower()] + + resolved_path = None + for ocio_p in ocio_paths: + resolved_path = str(ocio_p).format(**os.environ) + if not os.path.exists(resolved_path): + continue + + if resolved_path: + self._root_node["customOCIOConfigPath"].setValue( + str(resolved_path).replace("\\", "/") + ) + log.debug("nuke.root()['{}'] changed to: {}".format( + "customOCIOConfigPath", resolved_path)) + root_dict.pop("customOCIOConfigPath") # then set the rest for knob, value in root_dict.items(): + # skip unfilled ocio config path + # it will be dict in value + if isinstance(value, dict): + continue if self._root_node[knob].value() not in value: self._root_node[knob].setValue(str(value)) log.debug("nuke.root()['{}'] changed to: {}".format( knob, value)) - def set_writes_colorspace(self, write_dict): + def set_writes_colorspace(self): ''' Adds correct colorspace to write node dict - Arguments: - write_dict (dict): nuke write node as dictionary - ''' - # scene will have fixed colorspace following presets for the project - if not isinstance(write_dict, dict): - msg = "set_root_colorspace(): argument should be dictionary" - log.error(msg) - return - from avalon.nuke import read - for node in nuke.allNodes(): - - if node.Class() in ["Viewer", "Dot"]: - continue + for node in nuke.allNodes(filter="Group"): # get data from avalon knob avalon_knob_data 
= read(node) @@ -773,49 +779,63 @@ class WorkfileSettings(object): if avalon_knob_data.get("families"): families.append(avalon_knob_data.get("families")) - # except disabled nodes but exclude backdrops in test - for fmly, knob in write_dict.items(): - write = None - if (fmly in families): - # Add all nodes in group instances. - if node.Class() == "Group": - node.begin() - for x in nuke.allNodes(): - if x.Class() == "Write": - write = x - node.end() - elif node.Class() == "Write": - write = node - else: - log.warning("Wrong write node Class") + data_preset = { + "nodeclass": avalon_knob_data["family"], + "families": families, + "creator": avalon_knob_data['creator'] + } - write["colorspace"].setValue(str(knob["colorspace"])) - log.info( - "Setting `{0}` to `{1}`".format( - write.name(), - knob["colorspace"])) + nuke_imageio_writes = get_created_node_imageio_setting( + **data_preset) - def set_reads_colorspace(self, reads): + log.debug("nuke_imageio_writes: `{}`".format(nuke_imageio_writes)) + + if not nuke_imageio_writes: + return + + write_node = None + + # get into the group node + node.begin() + for x in nuke.allNodes(): + if x.Class() == "Write": + write_node = x + node.end() + + if not write_node: + return + + # write all knobs to node + for knob in nuke_imageio_writes["knobs"]: + value = knob["value"] + if isinstance(value, six.text_type): + value = str(value) + if str(value).startswith("0x"): + value = int(value, 16) + + write_node[knob["name"]].setValue(value) + + + def set_reads_colorspace(self, read_clrs_inputs): """ Setting colorspace to Read nodes Looping trought all read nodes and tries to set colorspace based on regex rules in presets """ - changes = dict() + changes = {} for n in nuke.allNodes(): file = nuke.filename(n) - if not n.Class() == "Read": + if n.Class() != "Read": continue - # load nuke presets for Read's colorspace - read_clrs_presets = config.get_init_presets()["colorspace"].get( - "nuke", {}).get("read", {}) - # check if any colorspace presets for read is mathing - preset_clrsp = next((read_clrs_presets[k] - for k in read_clrs_presets - if bool(re.search(k, file))), - None) + preset_clrsp = None + + for input in read_clrs_inputs: + if not bool(re.search(input["regex"], file)): + continue + preset_clrsp = input["colorspace"] + log.debug(preset_clrsp) if preset_clrsp is not None: current = n["colorspace"].value() @@ -849,13 +869,15 @@ class WorkfileSettings(object): def set_colorspace(self): ''' Setting colorpace following presets ''' - nuke_colorspace = config.get_init_presets( - )["colorspace"].get("nuke", None) + # get imageio + imageio = get_anatomy_settings(opnl.project_name)["imageio"] + nuke_colorspace = imageio["nuke"] try: - self.set_root_colorspace(nuke_colorspace["root"]) + self.set_root_colorspace(nuke_colorspace["workfile"]) except AttributeError: - msg = "set_colorspace(): missing `root` settings in template" + msg = "set_colorspace(): missing `workfile` settings in template" + nuke.message(msg) try: self.set_viewers_colorspace(nuke_colorspace["viewer"]) @@ -865,15 +887,14 @@ class WorkfileSettings(object): log.error(msg) try: - self.set_writes_colorspace(nuke_colorspace["write"]) - except AttributeError: - msg = "set_colorspace(): missing `write` settings in template" - nuke.message(msg) - log.error(msg) + self.set_writes_colorspace() + except AttributeError as _error: + nuke.message(_error) + log.error(_error) - reads = nuke_colorspace.get("read") - if reads: - self.set_reads_colorspace(reads) + read_clrs_inputs = 
nuke_colorspace["regexInputs"].get("inputs", []) + if read_clrs_inputs: + self.set_reads_colorspace(read_clrs_inputs) try: for key in nuke_colorspace: @@ -1055,15 +1076,14 @@ class WorkfileSettings(object): def set_favorites(self): work_dir = os.getenv("AVALON_WORKDIR") asset = os.getenv("AVALON_ASSET") - project = os.getenv("AVALON_PROJECT") favorite_items = OrderedDict() # project # get project's root and split to parts projects_root = os.path.normpath(work_dir.split( - project)[0]) + opnl.project_name)[0]) # add project name - project_dir = os.path.join(projects_root, project) + "/" + project_dir = os.path.join(projects_root, opnl.project_name) + "/" # add to favorites favorite_items.update({"Project dir": project_dir.replace("\\", "/")}) @@ -1113,13 +1133,13 @@ def get_write_node_template_attr(node): data['avalon'] = avalon.nuke.read( node) data_preset = { - "class": data['avalon']['family'], - "families": data['avalon']['families'], - "preset": data['avalon']['families'] # omit < 2.0.0v + "nodeclass": data['avalon']['family'], + "families": [data['avalon']['families']], + "creator": data['avalon']['creator'] } # get template data - nuke_imageio_writes = get_node_imageio_setting(**data_preset) + nuke_imageio_writes = get_created_node_imageio_setting(**data_preset) # collecting correct data correct_data = OrderedDict({ @@ -1215,8 +1235,7 @@ class ExporterReview: """ anlib.reset_selection() ipn_orig = None - for v in [n for n in nuke.allNodes() - if "Viewer" == n.Class()]: + for v in nuke.allNodes(filter="Viewer"): ip = v['input_process'].getValue() ipn = v['input_process_node'].getValue() if "VIEWER_INPUT" not in ipn and ip: @@ -1616,3 +1635,41 @@ def find_free_space_to_paste_nodes( xpos = min(group_xpos) ypos = max(group_ypos) + abs(offset) return xpos, ypos + + +def launch_workfiles_app(): + '''Function letting start workfiles after start of host + ''' + # get state from settings + open_at_start = get_current_project_settings()["nuke"].get( + "general", {}).get("open_workfile_at_start") + + # return if none is defined + if not open_at_start: + return + + if not opnl.workfiles_launched: + opnl.workfiles_launched = True + workfiles.show(os.environ["AVALON_WORKDIR"]) + + +def open_last_workfile(): + # get state from settings + open_last_version = get_current_project_settings()["nuke"].get( + "general", {}).get("create_initial_workfile") + + log.info("Opening last workfile...") + last_workfile_path = os.environ.get("AVALON_LAST_WORKFILE") + + if not os.path.exists(last_workfile_path): + # return if none is defined + if not open_last_version: + return + + save_file(last_workfile_path) + else: + # to avoid looping of the callback, remove it! 
+ nuke.removeOnCreate(open_last_workfile, nodeClass="Root") + + # open workfile + open_file(last_workfile_path) diff --git a/openpype/hosts/nuke/api/menu.py b/openpype/hosts/nuke/api/menu.py index 2317066528..021ea04159 100644 --- a/openpype/hosts/nuke/api/menu.py +++ b/openpype/hosts/nuke/api/menu.py @@ -26,9 +26,9 @@ def install(): menu.addCommand( name, workfiles.show, - index=(rm_item[0]) + index=2 ) - + menu.addSeparator(index=3) # replace reset resolution from avalon core to pype's name = "Reset Resolution" new_name = "Set Resolution" @@ -63,16 +63,7 @@ def install(): # add colorspace menu item name = "Set Colorspace" menu.addCommand( - name, lambda: WorkfileSettings().set_colorspace(), - index=(rm_item[0] + 2) - ) - log.debug("Adding menu item: {}".format(name)) - - # add workfile builder menu item - name = "Build Workfile" - menu.addCommand( - name, lambda: BuildWorkfile().process(), - index=(rm_item[0] + 7) + name, lambda: WorkfileSettings().set_colorspace() ) log.debug("Adding menu item: {}".format(name)) @@ -80,11 +71,20 @@ def install(): name = "Apply All Settings" menu.addCommand( name, - lambda: WorkfileSettings().set_context_settings(), - index=(rm_item[0] + 3) + lambda: WorkfileSettings().set_context_settings() ) log.debug("Adding menu item: {}".format(name)) + menu.addSeparator() + + # add workfile builder menu item + name = "Build Workfile" + menu.addCommand( + name, lambda: BuildWorkfile().process() + ) + log.debug("Adding menu item: {}".format(name)) + + # adding shortcuts add_shortcuts_from_presets() diff --git a/openpype/hosts/nuke/plugins/create/create_write_prerender.py b/openpype/hosts/nuke/plugins/create/create_write_prerender.py index 38d1a0c2ed..6e1a2ddd96 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_prerender.py +++ b/openpype/hosts/nuke/plugins/create/create_write_prerender.py @@ -77,10 +77,14 @@ class CreateWritePrerender(plugin.PypeCreator): write_data = { "nodeclass": self.n_class, "families": [self.family], - "avalon": self.data, - "creator": self.__class__.__name__ + "avalon": self.data } + # add creator data + creator_data = {"creator": self.__class__.__name__} + self.data.update(creator_data) + write_data.update(creator_data) + if self.presets.get('fpath_template'): self.log.info("Adding template path from preset") write_data.update( diff --git a/openpype/hosts/nuke/plugins/create/create_write_render.py b/openpype/hosts/nuke/plugins/create/create_write_render.py index 72f851f19c..04983e9c75 100644 --- a/openpype/hosts/nuke/plugins/create/create_write_render.py +++ b/openpype/hosts/nuke/plugins/create/create_write_render.py @@ -80,10 +80,14 @@ class CreateWriteRender(plugin.PypeCreator): write_data = { "nodeclass": self.n_class, "families": [self.family], - "avalon": self.data, - "creator": self.__class__.__name__ + "avalon": self.data } + # add creator data + creator_data = {"creator": self.__class__.__name__} + self.data.update(creator_data) + write_data.update(creator_data) + if self.presets.get('fpath_template'): self.log.info("Adding template path from preset") write_data.update( diff --git a/openpype/hosts/nuke/plugins/load/load_mov.py b/openpype/hosts/nuke/plugins/load/load_mov.py index 92726913af..8b8c5d0c10 100644 --- a/openpype/hosts/nuke/plugins/load/load_mov.py +++ b/openpype/hosts/nuke/plugins/load/load_mov.py @@ -135,12 +135,14 @@ class LoadMov(api.Loader): read_name = self.node_name_template.format(**name_data) - # Create the Loader with the filename path set + read_node = nuke.createNode( + "Read", + "name 
{}".format(read_name) + ) + + # to avoid multiple undo steps for rest of process + # we will switch off undo-ing with viewer_update_and_undo_stop(): - read_node = nuke.createNode( - "Read", - "name {}".format(read_name) - ) read_node["file"].setValue(file) read_node["origfirst"].setValue(first) diff --git a/openpype/hosts/nuke/plugins/load/load_sequence.py b/openpype/hosts/nuke/plugins/load/load_sequence.py index df7aa55cd1..71f0b8c298 100644 --- a/openpype/hosts/nuke/plugins/load/load_sequence.py +++ b/openpype/hosts/nuke/plugins/load/load_sequence.py @@ -139,11 +139,15 @@ class LoadSequence(api.Loader): read_name = self.node_name_template.format(**name_data) # Create the Loader with the filename path set + + # TODO: it might be universal read to img/geo/camera + r = nuke.createNode( + "Read", + "name {}".format(read_name)) + + # to avoid multiple undo steps for rest of process + # we will switch off undo-ing with viewer_update_and_undo_stop(): - # TODO: it might be universal read to img/geo/camera - r = nuke.createNode( - "Read", - "name {}".format(read_name)) r["file"].setValue(file) # Set colorspace defined in version data diff --git a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py index 9c7f1b5e95..4257ed3131 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_slate_node.py +++ b/openpype/hosts/nuke/plugins/publish/collect_slate_node.py @@ -34,7 +34,8 @@ class CollectSlate(pyblish.api.InstancePlugin): if slate_node: instance.data["slateNode"] = slate_node instance.data["families"].append("slate") + instance.data["versionData"]["families"].append("slate") self.log.info( "Slate node is in node graph: `{}`".format(slate.name())) self.log.debug( - "__ instance: `{}`".format(instance)) + "__ instance.data: `{}`".format(instance.data)) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py index 2d25b29826..cdb0589525 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py @@ -55,11 +55,6 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): families_ak = avalon_knob_data.get("families", []) families = list() - if families_ak: - families.append(families_ak.lower()) - - families.append(family) - # except disabled nodes but exclude backdrops in test if ("nukenodes" not in family) and (node["disable"].value()): continue @@ -80,31 +75,34 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): # Add all nodes in group instances. 
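Both load plugins above now create the Read node first and only then enter the undo-suppressing context, so the knob edits do not pile up as individual undo steps. A minimal sketch of the pattern; the import location of the context manager is an assumption based on avalon's nuke integration:

import nuke
from avalon.nuke import viewer_update_and_undo_stop  # assumed location

read_node = nuke.createNode("Read", "name exampleRead")
with viewer_update_and_undo_stop():
    # knob edits here are collapsed out of the undo stack
    read_node["file"].setValue("/path/to/plate.%04d.exr")
    read_node["origfirst"].setValue(1001)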
if node.Class() == "Group": - # check if it is write node in family - if "write" in families: + # only alter families for render family + if "write" in families_ak.lower(): target = node["render"].value() if target == "Use existing frames": # Local rendering self.log.info("flagged for no render") - families.append("render") elif target == "Local": # Local rendering self.log.info("flagged for local render") - families.append("{}.local".format("render")) + families.append("{}.local".format(family)) + family = families_ak.lower() elif target == "On farm": # Farm rendering self.log.info("flagged for farm render") instance.data["transfer"] = False - families.append("{}.farm".format("render")) - if "render" in families: - families.remove("render") - family = "write" + families.append("{}.farm".format(family)) + family = families_ak.lower() node.begin() for i in nuke.allNodes(): instance.append(i) node.end() + if not families and families_ak and family not in [ + "render", "prerender"]: + families.append(families_ak.lower()) + + self.log.debug("__ family: `{}`".format(family)) self.log.debug("__ families: `{}`".format(families)) # Get format @@ -118,7 +116,9 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): anlib.add_publish_knob(node) # sync workfile version - if not next((f for f in families + _families_test = [family] + families + self.log.debug("__ _families_test: `{}`".format(_families_test)) + if not next((f for f in _families_test if "prerender" in f), None) and self.sync_workfile_version: # get version to instance for integration diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py b/openpype/hosts/nuke/plugins/publish/precollect_writes.py index a519609f52..5eaac89e84 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_writes.py @@ -1,4 +1,5 @@ import os +import re import nuke import pyblish.api import openpype.api as pype @@ -14,11 +15,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): hosts = ["nuke", "nukeassist"] families = ["write"] - # preset attributes - sync_workfile_version = True - def process(self, instance): - families = instance.data["families"] + _families_test = [instance.data["family"]] + instance.data["families"] node = None for x in instance: @@ -63,7 +61,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): int(last_frame) ) - if [fm for fm in families + if [fm for fm in _families_test if fm in ["render", "prerender"]]: if "representations" not in instance.data: instance.data["representations"] = list() @@ -91,9 +89,9 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): collected_frames_len)) # this will only run if slate frame is not already # rendered from previews publishes - if "slate" in instance.data["families"] \ + if "slate" in _families_test \ and (frame_length == collected_frames_len) \ - and ("prerender" not in instance.data["families"]): + and ("prerender" not in _families_test): frame_slate_str = "%0{}d".format( len(str(last_frame))) % (first_frame - 1) slate_frame = collected_frames[0].replace( @@ -107,8 +105,17 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): self.log.debug("couldn't collect frames: {}".format(label)) # Add version data to instance + colorspace = node["colorspace"].value() + + # remove default part of the string + if "default (" in colorspace: + colorspace = re.sub(r"default.\(|\)", "", colorspace) + self.log.debug("colorspace: `{}`".format(colorspace)) + version_data = { - "colorspace": node["colorspace"].value(), + 
"families": [f.replace(".local", "").replace(".farm", "") + for f in _families_test if "write" not in f], + "colorspace": colorspace } group_node = [x for x in instance if x.Class() == "Group"][0] @@ -133,13 +140,12 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "frameStartHandle": first_frame, "frameEndHandle": last_frame, "outputType": output_type, - "families": families, - "colorspace": node["colorspace"].value(), + "colorspace": colorspace, "deadlineChunkSize": deadlineChunkSize, "deadlinePriority": deadlinePriority }) - if "prerender" in families: + if "prerender" in _families_test: instance.data.update({ "family": "prerender", "families": [] @@ -164,6 +170,4 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "filename": api.get_representation_path(repre_doc) }] - self.log.debug("families: {}".format(families)) - self.log.debug("instance.data: {}".format(instance.data)) diff --git a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py index 21afc5313b..8b71aff1ac 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py +++ b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py @@ -5,23 +5,50 @@ import clique @pyblish.api.log -class RepairCollectionAction(pyblish.api.Action): - label = "Repair" +class RepairActionBase(pyblish.api.Action): on = "failed" icon = "wrench" + @staticmethod + def get_instance(context, plugin): + # Get the errored instances + failed = [] + for result in context.data["results"]: + if (result["error"] is not None and result["instance"] is not None + and result["instance"] not in failed): + failed.append(result["instance"]) + + # Apply pyblish.logic to get the instances for the plug-in + return pyblish.api.instances_by_plugin(failed, plugin) + + def repair_knob(self, instances, state): + for instance in instances: + files_remove = [os.path.join(instance.data["outputDir"], f) + for r in instance.data.get("representations", []) + for f in r.get("files", []) + ] + self.log.info("Files to be removed: {}".format(files_remove)) + for f in files_remove: + os.remove(f) + self.log.debug("removing file: {}".format(f)) + instance[0]["render"].setValue(state) + self.log.info("Rendering toggled to `{}`".format(state)) + + +class RepairCollectionActionToLocal(RepairActionBase): + label = "Repair > rerender with `Local` machine" + def process(self, context, plugin): - self.log.info(context[0][0]) - files_remove = [os.path.join(context[0].data["outputDir"], f) - for r in context[0].data.get("representations", []) - for f in r.get("files", []) - ] - self.log.info("Files to be removed: {}".format(files_remove)) - for f in files_remove: - os.remove(f) - self.log.debug("removing file: {}".format(f)) - context[0][0]["render"].setValue(True) - self.log.info("Rendering toggled ON") + instances = self.get_instance(context, plugin) + self.repair_knob(instances, "Local") + + +class RepairCollectionActionToFarm(RepairActionBase): + label = "Repair > rerender `On farm` with remote machines" + + def process(self, context, plugin): + instances = self.get_instance(context, plugin) + self.repair_knob(instances, "On farm") class ValidateRenderedFrames(pyblish.api.InstancePlugin): @@ -32,26 +59,28 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): label = "Validate rendered frame" hosts = ["nuke", "nukestudio"] - actions = [RepairCollectionAction] + actions = [RepairCollectionActionToLocal, RepairCollectionActionToFarm] + def process(self, instance): - for repre in 
instance.data.get('representations'): + for repre in instance.data["representations"]: - if not repre.get('files'): + if not repre.get("files"): msg = ("no frames were collected, " "you need to render them") self.log.error(msg) raise ValidationException(msg) collections, remainder = clique.assemble(repre["files"]) - self.log.info('collections: {}'.format(str(collections))) - self.log.info('remainder: {}'.format(str(remainder))) + self.log.info("collections: {}".format(str(collections))) + self.log.info("remainder: {}".format(str(remainder))) collection = collections[0] frame_length = int( - instance.data["frameEndHandle"] - instance.data["frameStartHandle"] + 1 + instance.data["frameEndHandle"] + - instance.data["frameStartHandle"] + 1 ) if frame_length != 1: @@ -65,15 +94,10 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): self.log.error(msg) raise ValidationException(msg) - # if len(remainder) != 0: - # msg = "There are some extra files in folder" - # self.log.error(msg) - # raise ValidationException(msg) - collected_frames_len = int(len(collection.indexes)) - self.log.info('frame_length: {}'.format(frame_length)) + self.log.info("frame_length: {}".format(frame_length)) self.log.info( - 'len(collection.indexes): {}'.format(collected_frames_len) + "len(collection.indexes): {}".format(collected_frames_len) ) if ("slate" in instance.data["families"]) \ @@ -84,6 +108,6 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): "{} missing frames. Use repair to render all frames" ).format(__name__) - instance.data['collection'] = collection + instance.data["collection"] = collection return diff --git a/openpype/hosts/photoshop/api/__init__.py b/openpype/hosts/photoshop/api/__init__.py index 7304574ffd..81942c3b2a 100644 --- a/openpype/hosts/photoshop/api/__init__.py +++ b/openpype/hosts/photoshop/api/__init__.py @@ -9,7 +9,7 @@ from openpype import lib from pyblish import api as pyblish import openpype.hosts.photoshop -log = logging.getLogger("pype.hosts.photoshop") +log = logging.getLogger("openpype.hosts.photoshop") HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.photoshop.__file__)) PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") diff --git a/openpype/hosts/resolve/__init__.py b/openpype/hosts/resolve/__init__.py index 734e0bc5df..3e49ce3b9b 100644 --- a/openpype/hosts/resolve/__init__.py +++ b/openpype/hosts/resolve/__init__.py @@ -11,7 +11,9 @@ from .api.pipeline import ( update_container, publish, launch_workfiles_app, - maintained_selection + maintained_selection, + remove_instance, + list_instances ) from .api.lib import ( @@ -73,6 +75,8 @@ __all__ = [ "publish", "launch_workfiles_app", "maintained_selection", + "remove_instance", + "list_instances", # utils "setup", diff --git a/openpype/hosts/resolve/api/menu.py b/openpype/hosts/resolve/api/menu.py index 5ed7aeab34..e7be3fc963 100644 --- a/openpype/hosts/resolve/api/menu.py +++ b/openpype/hosts/resolve/api/menu.py @@ -12,7 +12,8 @@ from avalon.tools import ( creator, loader, sceneinventory, - libraryloader + libraryloader, + subsetmanager ) @@ -64,8 +65,9 @@ class OpenPypeMenu(QtWidgets.QWidget): publish_btn = QtWidgets.QPushButton("Publish ...", self) load_btn = QtWidgets.QPushButton("Load ...", self) inventory_btn = QtWidgets.QPushButton("Inventory ...", self) + subsetm_btn = QtWidgets.QPushButton("Subset Manager ...", self) libload_btn = QtWidgets.QPushButton("Library ...", self) - # rename_btn = QtWidgets.QPushButton("Rename ...", self) + # rename_btn = QtWidgets.QPushButton("Rename", self) # 
set_colorspace_btn = QtWidgets.QPushButton( # "Set colorspace from presets", self # ) @@ -81,6 +83,7 @@ class OpenPypeMenu(QtWidgets.QWidget): layout.addWidget(publish_btn) layout.addWidget(load_btn) layout.addWidget(inventory_btn) + layout.addWidget(subsetm_btn) layout.addWidget(Spacer(15, self)) @@ -102,6 +105,7 @@ class OpenPypeMenu(QtWidgets.QWidget): publish_btn.clicked.connect(self.on_publish_clicked) load_btn.clicked.connect(self.on_load_clicked) inventory_btn.clicked.connect(self.on_inventory_clicked) + subsetm_btn.clicked.connect(self.on_subsetm_clicked) libload_btn.clicked.connect(self.on_libload_clicked) # rename_btn.clicked.connect(self.on_rename_clicked) # set_colorspace_btn.clicked.connect(self.on_set_colorspace_clicked) @@ -127,6 +131,10 @@ class OpenPypeMenu(QtWidgets.QWidget): print("Clicked Inventory") sceneinventory.show() + def on_subsetm_clicked(self): + print("Clicked Subset Manager") + subsetmanager.show() + def on_libload_clicked(self): print("Clicked Library") libraryloader.show() diff --git a/openpype/hosts/resolve/api/pipeline.py b/openpype/hosts/resolve/api/pipeline.py index d4d928a7d9..a659ac7e51 100644 --- a/openpype/hosts/resolve/api/pipeline.py +++ b/openpype/hosts/resolve/api/pipeline.py @@ -47,7 +47,7 @@ def install(): avalon.data["familiesStateDefault"] = False avalon.data["familiesStateToggled"] = family_states - log.info("pype.hosts.resolve installed") + log.info("openpype.hosts.resolve installed") pyblish.register_host("resolve") pyblish.register_plugin_path(PUBLISH_PATH) @@ -258,3 +258,51 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): # Whether instances should be passthrough based on new value timeline_item = instance.data["item"] set_publish_attribute(timeline_item, new_value) + + +def remove_instance(instance): + """Remove instance marker from track item.""" + instance_id = instance.get("uuid") + + selected_timeline_items = lib.get_current_timeline_items( + filter=True, selecting_color=lib.publish_clip_color) + + found_ti = None + for timeline_item_data in selected_timeline_items: + timeline_item = timeline_item_data["clip"]["item"] + + # get openpype tag data + tag_data = lib.get_timeline_item_pype_tag(timeline_item) + _ti_id = tag_data.get("uuid") + if _ti_id == instance_id: + found_ti = timeline_item + break + + if found_ti is None: + return + + # removing instance by marker color + print(f"Removing instance: {found_ti.GetName()}") + found_ti.DeleteMarkersByColor(lib.pype_marker_color) + + +def list_instances(): + """List all created instances from current workfile.""" + listed_instances = [] + selected_timeline_items = lib.get_current_timeline_items( + filter=True, selecting_color=lib.publish_clip_color) + + for timeline_item_data in selected_timeline_items: + timeline_item = timeline_item_data["clip"]["item"] + ti_name = timeline_item.GetName().split(".")[0] + + # get openpype tag data + tag_data = lib.get_timeline_item_pype_tag(timeline_item) + + if tag_data: + asset = tag_data.get("asset") + subset = tag_data.get("subset") + tag_data["label"] = f"{ti_name} [{asset}-{subset}]" + listed_instances.append(tag_data) + + return listed_instances diff --git a/openpype/hosts/resolve/api/plugin.py b/openpype/hosts/resolve/api/plugin.py index 3833795b96..4712d0a8b9 100644 --- a/openpype/hosts/resolve/api/plugin.py +++ b/openpype/hosts/resolve/api/plugin.py @@ -1,4 +1,5 @@ import re +import uuid from avalon import api import openpype.api as pype from openpype.hosts import resolve @@ -697,13 +698,13 @@ class PublishClip: Populating 
the tag data into internal variable self.tag_data """ # define vertical sync attributes - master_layer = True + hero_track = True self.review_layer = "" if self.vertical_sync: # check if track name is not in driving layer if self.track_name not in self.driving_layer: # if it is not then define vertical sync as None - master_layer = False + hero_track = False # increasing steps by index of rename iteration self.count_steps *= self.rename_index @@ -717,7 +718,7 @@ class PublishClip: self.tag_data[_k] = _v["value"] # driving layer is set as positive match - if master_layer or self.vertical_sync: + if hero_track or self.vertical_sync: # mark review layer if self.review_track and ( self.review_track not in self.review_track_default): @@ -751,35 +752,39 @@ class PublishClip: hierarchy_formating_data ) - tag_hierarchy_data.update({"masterLayer": True}) - if master_layer and self.vertical_sync: - # tag_hierarchy_data.update({"masterLayer": True}) + tag_hierarchy_data.update({"heroTrack": True}) + if hero_track and self.vertical_sync: self.vertical_clip_match.update({ (self.clip_in, self.clip_out): tag_hierarchy_data }) - if not master_layer and self.vertical_sync: + if not hero_track and self.vertical_sync: # driving layer is set as negative match - for (_in, _out), master_data in self.vertical_clip_match.items(): - master_data.update({"masterLayer": False}) + for (_in, _out), hero_data in self.vertical_clip_match.items(): + hero_data.update({"heroTrack": False}) if _in == self.clip_in and _out == self.clip_out: - data_subset = master_data["subset"] - # add track index in case duplicity of names in master data + data_subset = hero_data["subset"] + # add track index in case duplicity of names in hero data if self.subset in data_subset: - master_data["subset"] = self.subset + str( + hero_data["subset"] = self.subset + str( self.track_index) # in case track name and subset name is the same then add if self.subset_name == self.track_name: - master_data["subset"] = self.subset + hero_data["subset"] = self.subset # assing data to return hierarchy data to tag - tag_hierarchy_data = master_data + tag_hierarchy_data = hero_data # add data to return data dict self.tag_data.update(tag_hierarchy_data) - if master_layer and self.review_layer: + # add uuid to tag data + self.tag_data["uuid"] = str(uuid.uuid4()) + + # add review track only to hero track + if hero_track and self.review_layer: self.tag_data.update({"reviewTrack": self.review_layer}) + def _solve_tag_hierarchy_data(self, hierarchy_formating_data): """ Solve tag data from hierarchy data and templates. 
""" # fill up clip name and hierarchy keys diff --git a/openpype/hosts/resolve/hooks/pre_resolve_setup.py b/openpype/hosts/resolve/hooks/pre_resolve_setup.py index 0ee55d3790..bcb27e24fc 100644 --- a/openpype/hosts/resolve/hooks/pre_resolve_setup.py +++ b/openpype/hosts/resolve/hooks/pre_resolve_setup.py @@ -44,7 +44,7 @@ class ResolvePrelaunch(PreLaunchHook): self.launch_context.env["PRE_PYTHON_SCRIPT"] = pre_py_sc self.log.debug(f"-- pre_py_sc: `{pre_py_sc}`...") try: - __import__("pype.hosts.resolve") + __import__("openpype.hosts.resolve") __import__("pyblish") except ImportError: diff --git a/openpype/hosts/resolve/plugins/create/create_shot_clip.py b/openpype/hosts/resolve/plugins/create/create_shot_clip.py index 2916a52298..41fdbf5c61 100644 --- a/openpype/hosts/resolve/plugins/create/create_shot_clip.py +++ b/openpype/hosts/resolve/plugins/create/create_shot_clip.py @@ -117,7 +117,7 @@ class CreateShotClip(resolve.Creator): "vSyncTrack": { "value": gui_tracks, # noqa "type": "QComboBox", - "label": "Master track", + "label": "Hero track", "target": "ui", "toolTip": "Select driving track name which should be mastering all others", # noqa "order": 1} diff --git a/openpype/hosts/resolve/plugins/publish/collect_instances.py b/openpype/hosts/resolve/plugins/publish/precollect_instances.py similarity index 95% rename from openpype/hosts/resolve/plugins/publish/collect_instances.py rename to openpype/hosts/resolve/plugins/publish/precollect_instances.py index f4eeb39754..c38cbc4f73 100644 --- a/openpype/hosts/resolve/plugins/publish/collect_instances.py +++ b/openpype/hosts/resolve/plugins/publish/precollect_instances.py @@ -5,11 +5,11 @@ from openpype.hosts import resolve from pprint import pformat -class CollectInstances(pyblish.api.ContextPlugin): +class PrecollectInstances(pyblish.api.ContextPlugin): """Collect all Track items selection.""" order = pyblish.api.CollectorOrder - 0.59 - label = "Collect Instances" + label = "Precollect Instances" hosts = ["resolve"] def process(self, context): @@ -26,7 +26,7 @@ class CollectInstances(pyblish.api.ContextPlugin): data = dict() timeline_item = timeline_item_data["clip"]["item"] - # get openpype tag data + # get pype tag data tag_data = resolve.get_timeline_item_pype_tag(timeline_item) self.log.debug(f"__ tag_data: {pformat(tag_data)}") @@ -102,10 +102,10 @@ class CollectInstances(pyblish.api.ContextPlugin): }) def create_shot_instance(self, context, timeline_item, **data): - master_layer = data.get("masterLayer") + hero_track = data.get("heroTrack") hierarchy_data = data.get("hierarchyData") - if not master_layer: + if not hero_track: return if not hierarchy_data: diff --git a/openpype/hosts/resolve/plugins/publish/collect_workfile.py b/openpype/hosts/resolve/plugins/publish/precollect_workfile.py similarity index 88% rename from openpype/hosts/resolve/plugins/publish/collect_workfile.py rename to openpype/hosts/resolve/plugins/publish/precollect_workfile.py index a66284ed02..ee05fb6f13 100644 --- a/openpype/hosts/resolve/plugins/publish/collect_workfile.py +++ b/openpype/hosts/resolve/plugins/publish/precollect_workfile.py @@ -9,10 +9,10 @@ from openpype.hosts.resolve.otio import davinci_export reload(davinci_export) -class CollectWorkfile(pyblish.api.ContextPlugin): - """Inject the current working file into context""" +class PrecollectWorkfile(pyblish.api.ContextPlugin): + """Precollect the current working file into context""" - label = "Collect Workfile" + label = "Precollect Workfile" order = pyblish.api.CollectorOrder - 0.6 def 
process(self, context): @@ -21,8 +21,6 @@ class CollectWorkfile(pyblish.api.ContextPlugin): subset = "workfile" project = resolve.get_current_project() fps = project.GetSetting("timelineFrameRate") - - active_timeline = resolve.get_current_timeline() video_tracks = resolve.get_video_track_names() # adding otio timeline to context diff --git a/openpype/hosts/resolve/utility_scripts/OTIO_export.py b/openpype/hosts/resolve/utility_scripts/OTIO_export.py index 91bc2c5700..0431eb7daa 100644 --- a/openpype/hosts/resolve/utility_scripts/OTIO_export.py +++ b/openpype/hosts/resolve/utility_scripts/OTIO_export.py @@ -58,9 +58,8 @@ def _close_window(event): def _export_button(event): pm = resolve.GetProjectManager() project = pm.GetCurrentProject() - fps = project.GetSetting("timelineFrameRate") timeline = project.GetCurrentTimeline() - otio_timeline = otio_export.create_otio_timeline(timeline, fps) + otio_timeline = otio_export.create_otio_timeline(project) otio_path = os.path.join( itm["exportfilebttn"].Text, timeline.GetName() + ".otio") diff --git a/openpype/hosts/tvpaint/api/__init__.py b/openpype/hosts/tvpaint/api/__init__.py index 068559049b..bd9ef51a76 100644 --- a/openpype/hosts/tvpaint/api/__init__.py +++ b/openpype/hosts/tvpaint/api/__init__.py @@ -1,14 +1,15 @@ import os import logging -from avalon.tvpaint.communication_server import register_localization_file -from avalon.tvpaint import pipeline import avalon.api import pyblish.api +from avalon.tvpaint import pipeline +from avalon.tvpaint.communication_server import register_localization_file +from .lib import set_context_settings from openpype.hosts import tvpaint -log = logging.getLogger("openpype.hosts.tvpaint") +log = logging.getLogger(__name__) HOST_DIR = os.path.dirname(os.path.abspath(tvpaint.__file__)) PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") @@ -34,6 +35,18 @@ def on_instance_toggle(instance, old_value, new_value): pipeline._write_instances(current_instances) +def initial_launch(): + # Setup project settings if its the template that's launched. + # TODO also check for template creation when it's possible to define + # templates + last_workfile = os.environ.get("AVALON_LAST_WORKFILE") + if not last_workfile or os.path.exists(last_workfile): + return + + log.info("Setting up project...") + set_context_settings() + + def install(): log.info("OpenPype - Installing TVPaint integration") localization_file = os.path.join(HOST_DIR, "resources", "avalon.loc") @@ -49,6 +62,8 @@ def install(): if on_instance_toggle not in registered_callbacks: pyblish.api.register_callback("instanceToggled", on_instance_toggle) + avalon.api.on("application.launched", initial_launch) + def uninstall(): log.info("OpenPype - Uninstalling TVPaint integration") diff --git a/openpype/hosts/tvpaint/api/lib.py b/openpype/hosts/tvpaint/api/lib.py index 4267129fe6..539cebe646 100644 --- a/openpype/hosts/tvpaint/api/lib.py +++ b/openpype/hosts/tvpaint/api/lib.py @@ -1,5 +1,8 @@ from PIL import Image +import avalon.io +from avalon.tvpaint.lib import execute_george + def composite_images(input_image_paths, output_filepath): """Composite images in order from passed list. @@ -18,3 +21,65 @@ def composite_images(input_image_paths, output_filepath): else: img_obj.alpha_composite(_img_obj) img_obj.save(output_filepath) + + +def set_context_settings(asset_doc=None): + """Set workfile settings by asset document data. + + Change fps, resolution and frame start/end. 
+ """ + if asset_doc is None: + # Use current session asset if not passed + asset_doc = avalon.io.find_one({ + "type": "asset", + "name": avalon.io.Session["AVALON_ASSET"] + }) + + project_doc = avalon.io.find_one({"type": "project"}) + + framerate = asset_doc["data"].get("fps") + if framerate is None: + framerate = project_doc["data"].get("fps") + + if framerate is not None: + execute_george( + "tv_framerate {} \"timestretch\"".format(framerate) + ) + else: + print("Framerate was not found!") + + width_key = "resolutionWidth" + height_key = "resolutionHeight" + + width = asset_doc["data"].get(width_key) + height = asset_doc["data"].get(height_key) + if width is None or height is None: + width = project_doc["data"].get(width_key) + height = project_doc["data"].get(height_key) + + if width is None or height is None: + print("Resolution was not found!") + else: + execute_george("tv_resizepage {} {} 0".format(width, height)) + + frame_start = asset_doc["data"].get("frameStart") + frame_end = asset_doc["data"].get("frameEnd") + + if frame_start is None or frame_end is None: + print("Frame range was not found!") + return + + handles = asset_doc["data"].get("handles") or 0 + handle_start = asset_doc["data"].get("handleStart") + handle_end = asset_doc["data"].get("handleEnd") + + if handle_start is None or handle_end is None: + handle_start = handles + handle_end = handles + + # Always start from 0 Mark In and set only Mark Out + mark_in = 0 + mark_out = mark_in + (frame_end - frame_start) + handle_start + handle_end + + execute_george("tv_markin {} set".format(mark_in)) + execute_george("tv_markout {} set".format(mark_out)) diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py b/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py new file mode 100644 index 0000000000..f291c363b8 --- /dev/null +++ b/openpype/hosts/tvpaint/plugins/publish/collect_instance_frames.py @@ -0,0 +1,37 @@ +import pyblish.api + + +class CollectOutputFrameRange(pyblish.api.ContextPlugin): + """Collect frame start/end from context. + + When instances are collected context does not contain `frameStart` and + `frameEnd` keys yet. They are collected in global plugin + `CollectAvalonEntities`. 
+    """
+    label = "Collect output frame range"
+    order = pyblish.api.CollectorOrder
+    hosts = ["tvpaint"]
+
+    def process(self, context):
+        for instance in context:
+            frame_start = instance.data.get("frameStart")
+            frame_end = instance.data.get("frameEnd")
+            if frame_start is not None and frame_end is not None:
+                self.log.debug(
+                    "Instance {} already has set frames {}-{}".format(
+                        str(instance), frame_start, frame_end
+                    )
+                )
+                continue
+
+            frame_start = context.data.get("frameStart")
+            frame_end = context.data.get("frameEnd")
+
+            instance.data["frameStart"] = frame_start
+            instance.data["frameEnd"] = frame_end
+
+            self.log.info(
+                "Set frames {}-{} on instance {} ".format(
+                    frame_start, frame_end, str(instance)
+                )
+            )
diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py
index 57602d9610..27bd8e9ede 100644
--- a/openpype/hosts/tvpaint/plugins/publish/collect_instances.py
+++ b/openpype/hosts/tvpaint/plugins/publish/collect_instances.py
@@ -18,7 +18,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
         ))

         for instance_data in workfile_instances:
-            instance_data["fps"] = context.data["fps"]
+            instance_data["fps"] = context.data["sceneFps"]

             # Store workfile instance data to instance data
             instance_data["originData"] = copy.deepcopy(instance_data)
@@ -32,6 +32,11 @@ class CollectInstances(pyblish.api.ContextPlugin):
             subset_name = instance_data["subset"]
             name = instance_data.get("name", subset_name)
             instance_data["name"] = name
+            instance_data["label"] = "{} [{}-{}]".format(
+                name,
+                context.data["sceneMarkIn"] + 1,
+                context.data["sceneMarkOut"] + 1
+            )

             active = instance_data.get("active", True)
             instance_data["active"] = active
@@ -73,8 +78,13 @@ class CollectInstances(pyblish.api.ContextPlugin):
             if instance is None:
                 continue

-            instance.data["frameStart"] = context.data["frameStart"]
-            instance.data["frameEnd"] = context.data["frameEnd"]
+            any_visible = False
+            for layer in instance.data["layers"]:
+                if layer["visible"]:
+                    any_visible = True
+                    break
+
+            instance.data["publish"] = any_visible

             self.log.debug("Created instance: {}\n{}".format(
                 instance, json.dumps(instance.data, indent=4)
@@ -103,7 +113,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
             group_id = instance_data["group_id"]
             group_layers = []
             for layer in layers_data:
-                if layer["group_id"] == group_id and layer["visible"]:
+                if layer["group_id"] == group_id:
                     group_layers.append(layer)

             if not group_layers:
diff --git a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py
index 7965112136..13c6c9eb78 100644
--- a/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py
+++ b/openpype/hosts/tvpaint/plugins/publish/collect_workfile_data.py
@@ -57,7 +57,10 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):

         # Collect context from workfile metadata
         self.log.info("Collecting workfile context")
+
         workfile_context = pipeline.get_current_workfile_context()
+        # Store workfile context to pyblish context
+        context.data["workfile_context"] = workfile_context
         if workfile_context:
             # Change current context with context from workfile
             key_map = (
@@ -67,16 +70,27 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
             for env_key, key in key_map:
                 avalon.api.Session[env_key] = workfile_context[key]
                 os.environ[env_key] = workfile_context[key]
+            self.log.info("Context changed to: {}".format(workfile_context))
+
+            asset_name = workfile_context["asset"]
+            task_name = 
workfile_context["task"] + else: + asset_name = current_context["asset"] + task_name = current_context["task"] # Handle older workfiles or workfiles without metadata - self.log.warning( + self.log.warning(( "Workfile does not contain information about context." " Using current Session context." - ) - workfile_context = current_context.copy() + )) - context.data["workfile_context"] = workfile_context - self.log.info("Context changed to: {}".format(workfile_context)) + # Store context asset name + context.data["asset"] = asset_name + self.log.info( + "Context is set to Asset: \"{}\" and Task: \"{}\"".format( + asset_name, task_name + ) + ) # Collect instances self.log.info("Collecting instance data from workfile") @@ -122,36 +136,26 @@ class CollectWorkfileData(pyblish.api.ContextPlugin): width = int(workfile_info_parts.pop(-1)) workfile_path = " ".join(workfile_info_parts).replace("\"", "") - frame_start, frame_end = self.collect_clip_frames() + # Marks return as "{frame - 1} {state} ", example "0 set". + result = lib.execute_george("tv_markin") + mark_in_frame, mark_in_state, _ = result.split(" ") + + result = lib.execute_george("tv_markout") + mark_out_frame, mark_out_state, _ = result.split(" ") + scene_data = { "currentFile": workfile_path, "sceneWidth": width, "sceneHeight": height, - "pixelAspect": pixel_apsect, - "frameStart": frame_start, - "frameEnd": frame_end, - "fps": frame_rate, - "fieldOrder": field_order + "scenePixelAspect": pixel_apsect, + "sceneFps": frame_rate, + "sceneFieldOrder": field_order, + "sceneMarkIn": int(mark_in_frame), + "sceneMarkInState": mark_in_state == "set", + "sceneMarkOut": int(mark_out_frame), + "sceneMarkOutState": mark_out_state == "set" } self.log.debug( "Scene data: {}".format(json.dumps(scene_data, indent=4)) ) context.data.update(scene_data) - - def collect_clip_frames(self): - clip_info_str = lib.execute_george("tv_clipinfo") - self.log.debug("Clip info: {}".format(clip_info_str)) - clip_info_items = clip_info_str.split(" ") - # Color index - not used - clip_info_items.pop(-1) - clip_info_items.pop(-1) - - mark_out = int(clip_info_items.pop(-1)) - frame_end = mark_out + 1 - clip_info_items.pop(-1) - - mark_in = int(clip_info_items.pop(-1)) - frame_start = mark_in + 1 - clip_info_items.pop(-1) - - return frame_start, frame_end diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index 0d125a1a50..007b5c41f1 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -1,8 +1,6 @@ import os import shutil -import time import tempfile -import multiprocessing import pyblish.api from avalon.tvpaint import lib @@ -45,10 +43,64 @@ class ExtractSequence(pyblish.api.Extractor): ) family_lowered = instance.data["family"].lower() - frame_start = instance.data["frameStart"] - frame_end = instance.data["frameEnd"] + mark_in = instance.context.data["sceneMarkIn"] + mark_out = instance.context.data["sceneMarkOut"] + # Frame start/end may be stored as float + frame_start = int(instance.data["frameStart"]) + frame_end = int(instance.data["frameEnd"]) - filename_template = self._get_filename_template(frame_end) + # Handles are not stored per instance but on Context + handle_start = instance.context.data["handleStart"] + handle_end = instance.context.data["handleEnd"] + + # --- Fallbacks ---------------------------------------------------- + # This is required if validations of ranges are ignored. 
+        # - none of this code changes processing if the range to render
+        #   matches the range of the expected output
+
+        # Prepare output frames
+        output_frame_start = frame_start - handle_start
+        output_frame_end = frame_end + handle_end
+
+        # Change output frame start to 0 if handles make it a negative number
+        if output_frame_start < 0:
+            self.log.warning((
+                "Frame start with handles has negative value."
+                " Changed to \"0\". Frames start: {}, Handle Start: {}"
+            ).format(frame_start, handle_start))
+            output_frame_start = 0
+
+        # Check Marks range and output range
+        output_range = output_frame_end - output_frame_start
+        marks_range = mark_out - mark_in
+
+        # Lower Mark Out if mark range is bigger than output
+        # - do not render unused frames
+        if output_range < marks_range:
+            new_mark_out = mark_out - (marks_range - output_range)
+            self.log.warning((
+                "Lowering render range to {} frames. Changed Mark Out {} -> {}"
+            ).format(output_range + 1, mark_out, new_mark_out))
+            # Assign new mark out to variable
+            mark_out = new_mark_out
+
+        # Lower output frame end so representation has right `frameEnd` value
+        elif output_range > marks_range:
+            new_output_frame_end = (
+                output_frame_end - (output_range - marks_range)
+            )
+            self.log.warning((
+                "Lowering representation range to {} frames."
+                " Changed frame end {} -> {}"
+            ).format(marks_range + 1, output_frame_end, new_output_frame_end))
+            output_frame_end = new_output_frame_end
+
+        # -------------------------------------------------------------------
+
+        filename_template = self._get_filename_template(
+            # Use the biggest number
+            max(mark_out, frame_end)
+        )
         ext = os.path.splitext(filename_template)[1].replace(".", "")

         self.log.debug("Using file template \"{}\"".format(filename_template))
@@ -57,7 +109,9 @@
         output_dir = instance.data.get("stagingDir")
         if not output_dir:
             # Create temp folder if staging dir is not set
-            output_dir = tempfile.mkdtemp().replace("\\", "/")
+            output_dir = (
+                tempfile.mkdtemp(prefix="tvpaint_render_")
+            ).replace("\\", "/")
             instance.data["stagingDir"] = output_dir

         self.log.debug(
@@ -65,23 +119,36 @@
         )

         if instance.data["family"] == "review":
-            repre_files, thumbnail_fullpath = self.render_review(
-                filename_template, output_dir, frame_start, frame_end
+            output_filenames, thumbnail_fullpath = self.render_review(
+                filename_template, output_dir, mark_in, mark_out
             )
         else:
             # Render output
-            repre_files, thumbnail_fullpath = self.render(
-                filename_template, output_dir, frame_start, frame_end,
+            output_filenames, thumbnail_fullpath = self.render(
+                filename_template, output_dir,
+                mark_in, mark_out,
                 filtered_layers
             )

+        # No output was rendered
+        if not output_filenames:
+            self.log.warning("Extractor did not create any output.")
+            return
+
+        repre_files = self._rename_output_files(
+            filename_template, output_dir,
+            mark_in, mark_out,
+            output_frame_start, output_frame_end
+        )
+
         # Fill tags and new families
         tags = []
         if family_lowered in ("review", "renderlayer"):
             tags.append("review")

         # Sequence of one frame
-        if len(repre_files) == 1:
+        single_file = len(repre_files) == 1
+        if single_file:
             repre_files = repre_files[0]

         new_repre = {
@@ -89,10 +156,13 @@
             "ext": ext,
             "files": repre_files,
             "stagingDir": output_dir,
-            "frameStart": frame_start,
-            "frameEnd": frame_end,
             "tags": tags
         }
+
+        if not single_file:
+            new_repre["frameStart"] = output_frame_start
+            new_repre["frameEnd"] = output_frame_end
+
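To make the fallback arithmetic above concrete, here is a small self-contained sketch; all values are invented for illustration and do not come from the patch:

```python
# Reconciling TVPaint Mark In/Out with the expected output frame range
mark_in, mark_out = 0, 25                               # zero-based marks
output_frame_start, output_frame_end = 996, 1015        # frames + handles

output_range = output_frame_end - output_frame_start   # 19
marks_range = mark_out - mark_in                        # 25

if output_range < marks_range:
    # More frames marked than needed: lower Mark Out, render fewer frames
    mark_out -= marks_range - output_range              # 25 -> 19
elif output_range > marks_range:
    # Fewer frames marked than expected: shrink the representation range
    output_frame_end -= output_range - marks_range

# Both ranges now span the same frame count (here 20 frames, inclusive)
assert mark_out - mark_in == output_frame_end - output_frame_start
```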
self.log.debug("Creating new representation: {}".format(new_repre)) instance.data["representations"].append(new_repre) @@ -133,9 +203,45 @@ class ExtractSequence(pyblish.api.Extractor): return "{{frame:0>{}}}".format(frame_padding) + ".png" - def render_review( - self, filename_template, output_dir, frame_start, frame_end + def _rename_output_files( + self, filename_template, output_dir, + mark_in, mark_out, output_frame_start, output_frame_end ): + # Use differnet ranges based on Mark In and output Frame Start values + # - this is to make sure that filename renaming won't affect files that + # are not renamed yet + mark_start_is_less = bool(mark_in < output_frame_start) + if mark_start_is_less: + marks_range = range(mark_out, mark_in - 1, -1) + frames_range = range(output_frame_end, output_frame_start - 1, -1) + else: + # This is less possible situation as frame start will be in most + # cases higher than Mark In. + marks_range = range(mark_in, mark_out + 1) + frames_range = range(output_frame_start, output_frame_end + 1) + + repre_filepaths = [] + for mark, frame in zip(marks_range, frames_range): + new_filename = filename_template.format(frame=frame) + new_filepath = os.path.join(output_dir, new_filename) + + repre_filepaths.append(new_filepath) + + if mark != frame: + old_filename = filename_template.format(frame=mark) + old_filepath = os.path.join(output_dir, old_filename) + os.rename(old_filepath, new_filepath) + + # Reverse repre files order if output + if mark_start_is_less: + repre_filepaths = list(reversed(repre_filepaths)) + + return [ + os.path.basename(path) + for path in repre_filepaths + ] + + def render_review(self, filename_template, output_dir, mark_in, mark_out): """ Export images from TVPaint using `tv_savesequence` command. Args: @@ -144,8 +250,8 @@ class ExtractSequence(pyblish.api.Extractor): keyword argument `{frame}` or index argument (for same value). Extension in template must match `save_mode`. output_dir (str): Directory where files will be stored. - first_frame (int): Starting frame from which export will begin. - last_frame (int): On which frame export will end. + mark_in (int): Starting frame index from which export will begin. + mark_out (int): On which frame index export will end. Retruns: tuple: With 2 items first is list of filenames second is path to @@ -154,10 +260,8 @@ class ExtractSequence(pyblish.api.Extractor): self.log.debug("Preparing data for rendering.") first_frame_filepath = os.path.join( output_dir, - filename_template.format(frame=frame_start) + filename_template.format(frame=mark_in) ) - mark_in = frame_start - 1 - mark_out = frame_end - 1 george_script_lines = [ "tv_SaveMode \"PNG\"", @@ -170,13 +274,22 @@ class ExtractSequence(pyblish.api.Extractor): ] lib.execute_george_through_file("\n".join(george_script_lines)) - output = [] first_frame_filepath = None - for frame in range(frame_start, frame_end + 1): + output_filenames = [] + for frame in range(mark_in, mark_out + 1): filename = filename_template.format(frame=frame) - output.append(filename) + output_filenames.append(filename) + + filepath = os.path.join(output_dir, filename) + if not os.path.exists(filepath): + raise AssertionError( + "Output was not rendered. 
File was not found {}".format( + filepath + ) + ) + if first_frame_filepath is None: - first_frame_filepath = os.path.join(output_dir, filename) + first_frame_filepath = filepath thumbnail_filepath = os.path.join(output_dir, "thumbnail.jpg") if first_frame_filepath and os.path.exists(first_frame_filepath): @@ -184,11 +297,10 @@ class ExtractSequence(pyblish.api.Extractor): thumbnail_obj = Image.new("RGB", source_img.size, (255, 255, 255)) thumbnail_obj.paste(source_img) thumbnail_obj.save(thumbnail_filepath) - return output, thumbnail_filepath - def render( - self, filename_template, output_dir, frame_start, frame_end, layers - ): + return output_filenames, thumbnail_filepath + + def render(self, filename_template, output_dir, mark_in, mark_out, layers): """ Export images from TVPaint. Args: @@ -197,8 +309,8 @@ class ExtractSequence(pyblish.api.Extractor): keyword argument `{frame}` or index argument (for same value). Extension in template must match `save_mode`. output_dir (str): Directory where files will be stored. - first_frame (int): Starting frame from which export will begin. - last_frame (int): On which frame export will end. + mark_in (int): Starting frame index from which export will begin. + mark_out (int): On which frame index export will end. layers (list): List of layers to be exported. Retruns: @@ -219,14 +331,11 @@ class ExtractSequence(pyblish.api.Extractor): # Sort layer positions in reverse order sorted_positions = list(reversed(sorted(layers_by_position.keys()))) if not sorted_positions: - return + return [], None self.log.debug("Collecting pre/post behavior of individual layers.") behavior_by_layer_id = lib.get_layers_pre_post_behavior(layer_ids) - mark_in_index = frame_start - 1 - mark_out_index = frame_end - 1 - tmp_filename_template = "pos_{pos}." + filename_template files_by_position = {} @@ -239,25 +348,47 @@ class ExtractSequence(pyblish.api.Extractor): tmp_filename_template, output_dir, behavior, - mark_in_index, - mark_out_index + mark_in, + mark_out ) - files_by_position[position] = files_by_frames + if files_by_frames: + files_by_position[position] = files_by_frames + else: + self.log.warning(( + "Skipped layer \"{}\". Probably out of Mark In/Out range." 
+ ).format(layer["name"])) + + if not files_by_position: + layer_names = set(layer["name"] for layer in layers) + joined_names = ", ".join( + ["\"{}\"".format(name) for name in layer_names] + ) + self.log.warning( + "Layers {} do not have content in range {} - {}".format( + joined_names, mark_in, mark_out + ) + ) + return [], None output_filepaths = self._composite_files( files_by_position, - mark_in_index, - mark_out_index, + mark_in, + mark_out, filename_template, output_dir ) self._cleanup_tmp_files(files_by_position) - thumbnail_src_filepath = None - thumbnail_filepath = None - if output_filepaths: - thumbnail_src_filepath = tuple(sorted(output_filepaths))[0] + output_filenames = [ + os.path.basename(filepath) + for filepath in output_filepaths + ] + thumbnail_src_filepath = None + if output_filepaths: + thumbnail_src_filepath = output_filepaths[0] + + thumbnail_filepath = None if thumbnail_src_filepath and os.path.exists(thumbnail_src_filepath): source_img = Image.open(thumbnail_src_filepath) thumbnail_filepath = os.path.join(output_dir, "thumbnail.jpg") @@ -265,11 +396,7 @@ class ExtractSequence(pyblish.api.Extractor): thumbnail_obj.paste(source_img) thumbnail_obj.save(thumbnail_filepath) - repre_files = [ - os.path.basename(path) - for path in output_filepaths - ] - return repre_files, thumbnail_filepath + return output_filenames, thumbnail_filepath def _render_layer( self, @@ -283,6 +410,22 @@ class ExtractSequence(pyblish.api.Extractor): layer_id = layer["layer_id"] frame_start_index = layer["frame_start"] frame_end_index = layer["frame_end"] + + pre_behavior = behavior["pre"] + post_behavior = behavior["post"] + + # Check if layer is before mark in + if frame_end_index < mark_in_index: + # Skip layer if post behavior is "none" + if post_behavior == "none": + return {} + + # Check if layer is after mark out + elif frame_start_index > mark_out_index: + # Skip layer if pre behavior is "none" + if pre_behavior == "none": + return {} + exposure_frames = lib.get_exposure_frames( layer_id, frame_start_index, frame_end_index ) @@ -341,8 +484,6 @@ class ExtractSequence(pyblish.api.Extractor): self.log.debug("Filled frames {}".format(str(_debug_filled_frames))) # Fill frames by pre/post behavior of layer - pre_behavior = behavior["pre"] - post_behavior = behavior["post"] self.log.debug(( "Completing image sequence of layer by pre/post behavior." 
" PRE: {} | POST: {}" @@ -530,17 +671,12 @@ class ExtractSequence(pyblish.api.Extractor): filepath = position_data[frame_idx] images_by_frame[frame_idx].append(filepath) - process_count = os.cpu_count() - if process_count > 1: - process_count -= 1 - - processes = {} output_filepaths = [] missing_frame_paths = [] random_frame_path = None for frame_idx in sorted(images_by_frame.keys()): image_filepaths = images_by_frame[frame_idx] - output_filename = filename_template.format(frame=frame_idx + 1) + output_filename = filename_template.format(frame=frame_idx) output_filepath = os.path.join(output_dir, output_filename) output_filepaths.append(output_filepath) @@ -553,45 +689,15 @@ class ExtractSequence(pyblish.api.Extractor): if len(image_filepaths) == 1: os.rename(image_filepaths[0], output_filepath) - # Prepare process for compositing of images + # Composite images else: - processes[frame_idx] = multiprocessing.Process( - target=composite_images, - args=(image_filepaths, output_filepath) - ) + composite_images(image_filepaths, output_filepath) # Store path of random output image that will 100% exist after all # multiprocessing as mockup for missing frames if random_frame_path is None: random_frame_path = output_filepath - self.log.info( - "Running {} compositing processes - this mey take a while.".format( - len(processes) - ) - ) - # Wait until all compositing processes are done - running_processes = {} - while True: - for idx in tuple(running_processes.keys()): - process = running_processes[idx] - if not process.is_alive(): - running_processes.pop(idx).join() - - if processes and len(running_processes) != process_count: - indexes = list(processes.keys()) - for _ in range(process_count - len(running_processes)): - if not indexes: - break - idx = indexes.pop(0) - running_processes[idx] = processes.pop(idx) - running_processes[idx].start() - - if not running_processes and not processes: - break - - time.sleep(0.01) - self.log.debug( "Creating transparent images for frames without render {}.".format( str(missing_frame_paths) diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_asset_name.py b/openpype/hosts/tvpaint/plugins/publish/validate_asset_name.py new file mode 100644 index 0000000000..4ce8d5347d --- /dev/null +++ b/openpype/hosts/tvpaint/plugins/publish/validate_asset_name.py @@ -0,0 +1,55 @@ +import pyblish.api +from avalon.tvpaint import pipeline + + +class FixAssetNames(pyblish.api.Action): + """Repair the asset names. + + Change instanace metadata in the workfile. + """ + + label = "Repair" + icon = "wrench" + on = "failed" + + def process(self, context, plugin): + context_asset_name = context.data["asset"] + old_instance_items = pipeline.list_instances() + new_instance_items = [] + for instance_item in old_instance_items: + instance_asset_name = instance_item.get("asset") + if ( + instance_asset_name + and instance_asset_name != context_asset_name + ): + instance_item["asset"] = context_asset_name + new_instance_items.append(instance_item) + pipeline._write_instances(new_instance_items) + + +class ValidateMissingLayers(pyblish.api.ContextPlugin): + """Validate assset name present on instance. + + Asset name on instance should be the same as context's. 
+ """ + + label = "Validate Asset Names" + order = pyblish.api.ValidatorOrder + hosts = ["tvpaint"] + actions = [FixAssetNames] + + def process(self, context): + context_asset_name = context.data["asset"] + for instance in context: + asset_name = instance.data.get("asset") + if asset_name and asset_name == context_asset_name: + continue + + instance_label = ( + instance.data.get("label") or instance.data["name"] + ) + raise AssertionError(( + "Different asset name on instance then context's." + " Instance \"{}\" has asset name: \"{}\"" + " Context asset name is: \"{}\"" + ).format(instance_label, asset_name, context_asset_name)) diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_marks.py b/openpype/hosts/tvpaint/plugins/publish/validate_marks.py new file mode 100644 index 0000000000..e2ef81e4a4 --- /dev/null +++ b/openpype/hosts/tvpaint/plugins/publish/validate_marks.py @@ -0,0 +1,81 @@ +import json + +import pyblish.api +from avalon.tvpaint import lib + + +class ValidateMarksRepair(pyblish.api.Action): + """Repair the marks.""" + + label = "Repair" + icon = "wrench" + on = "failed" + + def process(self, context, plugin): + expected_data = ValidateMarks.get_expected_data(context) + + lib.execute_george( + "tv_markin {} set".format(expected_data["markIn"]) + ) + lib.execute_george( + "tv_markout {} set".format(expected_data["markOut"]) + ) + + +class ValidateMarks(pyblish.api.ContextPlugin): + """Validate mark in and out are enabled and it's duration. + + Mark In/Out does not have to match frameStart and frameEnd but duration is + important. + """ + + label = "Validate Mark In/Out" + order = pyblish.api.ValidatorOrder + optional = True + actions = [ValidateMarksRepair] + + @staticmethod + def get_expected_data(context): + scene_mark_in = context.data["sceneMarkIn"] + + # Data collected in `CollectAvalonEntities` + frame_end = context.data["frameEnd"] + frame_start = context.data["frameStart"] + handle_start = context.data["handleStart"] + handle_end = context.data["handleEnd"] + + # Calculate expeted Mark out (Mark In + duration - 1) + expected_mark_out = ( + scene_mark_in + + (frame_end - frame_start) + + handle_start + handle_end + ) + return { + "markIn": scene_mark_in, + "markInState": True, + "markOut": expected_mark_out, + "markOutState": True + } + + def process(self, context): + current_data = { + "markIn": context.data["sceneMarkIn"], + "markInState": context.data["sceneMarkInState"], + "markOut": context.data["sceneMarkOut"], + "markOutState": context.data["sceneMarkOutState"] + } + expected_data = self.get_expected_data(context) + invalid = {} + for k in current_data.keys(): + if current_data[k] != expected_data[k]: + invalid[k] = { + "current": current_data[k], + "expected": expected_data[k] + } + + if invalid: + raise AssertionError( + "Marks does not match database:\n{}".format( + json.dumps(invalid, sort_keys=True, indent=4) + ) + ) diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_project_settings.py b/openpype/hosts/tvpaint/plugins/publish/validate_project_settings.py new file mode 100644 index 0000000000..84c03a9857 --- /dev/null +++ b/openpype/hosts/tvpaint/plugins/publish/validate_project_settings.py @@ -0,0 +1,34 @@ +import json + +import pyblish.api + + +class ValidateProjectSettings(pyblish.api.ContextPlugin): + """Validate project settings against database. 
+    """
+
+    label = "Validate Project Settings"
+    order = pyblish.api.ValidatorOrder
+    optional = True
+
+    def process(self, context):
+        scene_data = {
+            "fps": context.data.get("sceneFps"),
+            "resolutionWidth": context.data.get("sceneWidth"),
+            "resolutionHeight": context.data.get("sceneHeight"),
+            "pixelAspect": context.data.get("scenePixelAspect")
+        }
+        invalid = {}
+        for k in scene_data.keys():
+            expected_value = context.data["assetEntity"]["data"][k]
+            if scene_data[k] != expected_value:
+                invalid[k] = {
+                    "current": scene_data[k], "expected": expected_value
+                }
+
+        if invalid:
+            raise AssertionError(
+                "Project settings do not match database:\n{}".format(
+                    json.dumps(invalid, sort_keys=True, indent=4)
+                )
+            )
diff --git a/openpype/hosts/tvpaint/plugins/publish/validate_workfile_project_name.py b/openpype/hosts/tvpaint/plugins/publish/validate_workfile_project_name.py
index 7c1032fcad..cc664d8030 100644
--- a/openpype/hosts/tvpaint/plugins/publish/validate_workfile_project_name.py
+++ b/openpype/hosts/tvpaint/plugins/publish/validate_workfile_project_name.py
@@ -13,7 +13,15 @@ class ValidateWorkfileProjectName(pyblish.api.ContextPlugin):
     order = pyblish.api.ValidatorOrder

     def process(self, context):
-        workfile_context = context.data["workfile_context"]
+        workfile_context = context.data.get("workfile_context")
+        # If workfile context is missing then the project is guaranteed to
+        # match the `AVALON_PROJECT` value
+        if not workfile_context:
+            self.log.info(
+                "Workfile context (\"workfile_context\") is not filled."
+            )
+            return
+
         workfile_project_name = workfile_context["project"]
         env_project_name = os.environ["AVALON_PROJECT"]
         if workfile_project_name == env_project_name:
diff --git a/openpype/hosts/unreal/__init__.py b/openpype/hosts/unreal/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py
index 5945d0486b..f084cccfc3 100644
--- a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py
+++ b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py
@@ -23,8 +23,8 @@ class UnrealPrelaunchHook(PreLaunchHook):
     def execute(self):
         asset_name = self.data["asset_name"]
         task_name = self.data["task_name"]
-        workdir = self.env["AVALON_WORKDIR"]
-        engine_version = self.app_name.split("_")[-1]
+        workdir = self.launch_context.env["AVALON_WORKDIR"]
+        engine_version = self.app_name.split("/")[-1].replace("-", ".")
         unreal_project_name = f"{asset_name}_{task_name}"

         # Unreal is sensitive about project names longer then 20 chars
@@ -81,8 +81,8 @@ class UnrealPrelaunchHook(PreLaunchHook):
         # Set "AVALON_UNREAL_PLUGIN" to current process environment for
         # execution of `create_unreal_project`
         env_key = "AVALON_UNREAL_PLUGIN"
-        if self.env.get(env_key):
-            os.environ[env_key] = self.env[env_key]
+        if self.launch_context.env.get(env_key):
+            os.environ[env_key] = self.launch_context.env[env_key]

         unreal_lib.create_unreal_project(
             unreal_project_name,
diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py b/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py
new file mode 100644
index 0000000000..a9279bf6e0
--- /dev/null
+++ b/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py
@@ -0,0 +1,162 @@
+import os
+
+from avalon import api, pipeline
+from avalon.unreal import lib
+from avalon.unreal import pipeline as unreal_pipeline
+import unreal
+
+
+class PointCacheAlembicLoader(api.Loader):
+    """Load Point Cache from Alembic"""
+
+    
families = ["model", "pointcache"] + label = "Import Alembic Point Cache" + representations = ["abc"] + icon = "cube" + color = "orange" + + def load(self, context, name, namespace, data): + """ + Load and containerise representation into Content Browser. + + This is two step process. First, import FBX to temporary path and + then call `containerise()` on it - this moves all content to new + directory and then it will create AssetContainer there and imprint it + with metadata. This will mark this path as container. + + Args: + context (dict): application context + name (str): subset name + namespace (str): in Unreal this is basically path to container. + This is not passed here, so namespace is set + by `containerise()` because only then we know + real path. + data (dict): Those would be data to be imprinted. This is not used + now, data are imprinted by `containerise()`. + + Returns: + list(str): list of container content + """ + + # Create directory for asset and avalon container + root = "/Game/Avalon/Assets" + asset = context.get('asset').get('name') + suffix = "_CON" + if asset: + asset_name = "{}_{}".format(asset, name) + else: + asset_name = "{}".format(name) + + tools = unreal.AssetToolsHelpers().get_asset_tools() + asset_dir, container_name = tools.create_unique_asset_name( + "{}/{}/{}".format(root, asset, name), suffix="") + + container_name += suffix + + unreal.EditorAssetLibrary.make_directory(asset_dir) + + task = unreal.AssetImportTask() + + task.set_editor_property('filename', self.fname) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', False) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 + options = unreal.AbcImportSettings() + options.set_editor_property( + 'import_type', unreal.AlembicImportType.GEOMETRY_CACHE) + + options.geometry_cache_settings.set_editor_property( + 'flatten_tracks', False) + + task.options = options + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 + + # Create Asset Container + lib.create_avalon_container( + container=container_name, path=asset_dir) + + data = { + "schema": "openpype:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, + "asset_name": asset_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"] + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, container_name), data) + + asset_content = unreal.EditorAssetLibrary.list_assets( + asset_dir, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + return asset_content + + def update(self, container, representation): + name = container["asset_name"] + source_path = api.get_representation_path(representation) + destination_path = container["namespace"] + + task = unreal.AssetImportTask() + + task.set_editor_property('filename', source_path) + task.set_editor_property('destination_path', destination_path) + # strip suffix + task.set_editor_property('destination_name', name) + task.set_editor_property('replace_existing', True) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + # Unreal 4.24 ignores the settings. It works with Unreal 4.26 + options = unreal.AbcImportSettings() + options.set_editor_property( + 'import_type', unreal.AlembicImportType.GEOMETRY_CACHE) + + options.geometry_cache_settings.set_editor_property( + 'flatten_tracks', False) + + task.options = options + # do import fbx and replace existing data + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + container_path = "{}/{}".format(container["namespace"], + container["objectName"]) + # update metadata + unreal_pipeline.imprint( + container_path, + { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + }) + + asset_content = unreal.EditorAssetLibrary.list_assets( + destination_path, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + def remove(self, container): + path = container["namespace"] + parent_path = os.path.dirname(path) + + unreal.EditorAssetLibrary.delete_directory(path) + + asset_content = unreal.EditorAssetLibrary.list_assets( + parent_path, recursive=False + ) + + if len(asset_content) == 0: + unreal.EditorAssetLibrary.delete_directory(parent_path) diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py new file mode 100644 index 0000000000..b652af0b89 --- /dev/null +++ b/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py @@ -0,0 +1,156 @@ +import os + +from avalon import api, pipeline +from avalon.unreal import lib +from avalon.unreal import pipeline as unreal_pipeline +import unreal + + +class SkeletalMeshAlembicLoader(api.Loader): + """Load Unreal SkeletalMesh from Alembic""" + + families = ["pointcache"] + label = "Import Alembic Skeletal Mesh" + representations = 
["abc"] + icon = "cube" + color = "orange" + + def load(self, context, name, namespace, data): + """ + Load and containerise representation into Content Browser. + + This is two step process. First, import FBX to temporary path and + then call `containerise()` on it - this moves all content to new + directory and then it will create AssetContainer there and imprint it + with metadata. This will mark this path as container. + + Args: + context (dict): application context + name (str): subset name + namespace (str): in Unreal this is basically path to container. + This is not passed here, so namespace is set + by `containerise()` because only then we know + real path. + data (dict): Those would be data to be imprinted. This is not used + now, data are imprinted by `containerise()`. + + Returns: + list(str): list of container content + """ + + # Create directory for asset and avalon container + root = "/Game/Avalon/Assets" + asset = context.get('asset').get('name') + suffix = "_CON" + if asset: + asset_name = "{}_{}".format(asset, name) + else: + asset_name = "{}".format(name) + + tools = unreal.AssetToolsHelpers().get_asset_tools() + asset_dir, container_name = tools.create_unique_asset_name( + "{}/{}/{}".format(root, asset, name), suffix="") + + container_name += suffix + + unreal.EditorAssetLibrary.make_directory(asset_dir) + + task = unreal.AssetImportTask() + + task.set_editor_property('filename', self.fname) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', False) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + # Unreal 4.24 ignores the settings. It works with Unreal 4.26 + options = unreal.AbcImportSettings() + options.set_editor_property( + 'import_type', unreal.AlembicImportType.SKELETAL) + + task.options = options + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 + + # Create Asset Container + lib.create_avalon_container( + container=container_name, path=asset_dir) + + data = { + "schema": "openpype:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, + "asset_name": asset_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"] + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, container_name), data) + + asset_content = unreal.EditorAssetLibrary.list_assets( + asset_dir, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + return asset_content + + def update(self, container, representation): + name = container["asset_name"] + source_path = api.get_representation_path(representation) + destination_path = container["namespace"] + + task = unreal.AssetImportTask() + + task.set_editor_property('filename', source_path) + task.set_editor_property('destination_path', destination_path) + # strip suffix + task.set_editor_property('destination_name', name) + task.set_editor_property('replace_existing', True) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 + options = unreal.AbcImportSettings() + options.set_editor_property( + 'import_type', unreal.AlembicImportType.SKELETAL) + + task.options = options + # do import fbx and replace existing data + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + container_path = "{}/{}".format(container["namespace"], + container["objectName"]) + # update metadata + unreal_pipeline.imprint( + container_path, + { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + }) + + asset_content = unreal.EditorAssetLibrary.list_assets( + destination_path, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + def remove(self, container): + path = container["namespace"] + parent_path = os.path.dirname(path) + + unreal.EditorAssetLibrary.delete_directory(path) + + asset_content = unreal.EditorAssetLibrary.list_assets( + parent_path, recursive=False + ) + + if len(asset_content) == 0: + unreal.EditorAssetLibrary.delete_directory(parent_path) diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py new file mode 100644 index 0000000000..12b9320f72 --- /dev/null +++ b/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py @@ -0,0 +1,156 @@ +import os + +from avalon import api, pipeline +from avalon.unreal import lib +from avalon.unreal import pipeline as unreal_pipeline +import unreal + + +class StaticMeshAlembicLoader(api.Loader): + """Load Unreal StaticMesh from Alembic""" + + families = ["model"] + label = "Import Alembic Static Mesh" + representations = ["abc"] + icon = "cube" + color = "orange" + + def load(self, context, name, namespace, data): + """ + Load and containerise representation into Content Browser. + + This is two step process. First, import FBX to temporary path and + then call `containerise()` on it - this moves all content to new + directory and then it will create AssetContainer there and imprint it + with metadata. This will mark this path as container. + + Args: + context (dict): application context + name (str): subset name + namespace (str): in Unreal this is basically path to container. + This is not passed here, so namespace is set + by `containerise()` because only then we know + real path. + data (dict): Those would be data to be imprinted. This is not used + now, data are imprinted by `containerise()`. + + Returns: + list(str): list of container content + """ + + # Create directory for asset and avalon container + root = "/Game/Avalon/Assets" + asset = context.get('asset').get('name') + suffix = "_CON" + if asset: + asset_name = "{}_{}".format(asset, name) + else: + asset_name = "{}".format(name) + + tools = unreal.AssetToolsHelpers().get_asset_tools() + asset_dir, container_name = tools.create_unique_asset_name( + "{}/{}/{}".format(root, asset, name), suffix="") + + container_name += suffix + + unreal.EditorAssetLibrary.make_directory(asset_dir) + + task = unreal.AssetImportTask() + + task.set_editor_property('filename', self.fname) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', False) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + # Unreal 4.24 ignores the settings. 
It works with Unreal 4.26 + options = unreal.AbcImportSettings() + options.set_editor_property( + 'import_type', unreal.AlembicImportType.STATIC_MESH) + + task.options = options + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) # noqa: E501 + + # Create Asset Container + lib.create_avalon_container( + container=container_name, path=asset_dir) + + data = { + "schema": "openpype:container-2.0", + "id": pipeline.AVALON_CONTAINER_ID, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, + "asset_name": asset_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"] + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, container_name), data) + + asset_content = unreal.EditorAssetLibrary.list_assets( + asset_dir, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + return asset_content + + def update(self, container, representation): + name = container["asset_name"] + source_path = api.get_representation_path(representation) + destination_path = container["namespace"] + + task = unreal.AssetImportTask() + + task.set_editor_property('filename', source_path) + task.set_editor_property('destination_path', destination_path) + # strip suffix + task.set_editor_property('destination_name', name) + task.set_editor_property('replace_existing', True) + task.set_editor_property('automated', True) + task.set_editor_property('save', True) + + # set import options here + # Unreal 4.24 ignores the settings. It works with Unreal 4.26 + options = unreal.AbcImportSettings() + options.set_editor_property( + 'import_type', unreal.AlembicImportType.STATIC_MESH) + + task.options = options + # do import fbx and replace existing data + unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task]) + container_path = "{}/{}".format(container["namespace"], + container["objectName"]) + # update metadata + unreal_pipeline.imprint( + container_path, + { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + }) + + asset_content = unreal.EditorAssetLibrary.list_assets( + destination_path, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + def remove(self, container): + path = container["namespace"] + parent_path = os.path.dirname(path) + + unreal.EditorAssetLibrary.delete_directory(path) + + asset_content = unreal.EditorAssetLibrary.list_assets( + parent_path, recursive=False + ) + + if len(asset_content) == 0: + unreal.EditorAssetLibrary.delete_directory(parent_path) diff --git a/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py b/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py index dbea1d5951..dcb566fa4c 100644 --- a/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py +++ b/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py @@ -1,7 +1,6 @@ import os from avalon import api, pipeline -from avalon import unreal as avalon_unreal from avalon.unreal import lib from avalon.unreal import pipeline as unreal_pipeline import unreal diff --git a/openpype/launcher_actions.py b/openpype/launcher_actions.py deleted file mode 100644 index cf68dfb5c1..0000000000 --- a/openpype/launcher_actions.py +++ /dev/null @@ -1,30 +0,0 @@ -import os -import sys - -from avalon import api, pipeline - -PACKAGE_DIR = os.path.dirname(__file__) -PLUGINS_DIR = 
os.path.join(PACKAGE_DIR, "plugins", "launcher") -ACTIONS_DIR = os.path.join(PLUGINS_DIR, "actions") - - -def register_launcher_actions(): - """Register specific actions which should be accessible in the launcher""" - - actions = [] - ext = ".py" - sys.path.append(ACTIONS_DIR) - - for f in os.listdir(ACTIONS_DIR): - file, extention = os.path.splitext(f) - if ext in extention: - module = __import__(file) - klass = getattr(module, file) - actions.append(klass) - - if actions is []: - return - - for action in actions: - print("Using launcher action from config @ '{}'".format(action.name)) - pipeline.register_plugin(api.Action, action) diff --git a/openpype/lib/__init__.py b/openpype/lib/__init__.py index 554c0d8ec3..1df89dbb21 100644 --- a/openpype/lib/__init__.py +++ b/openpype/lib/__init__.py @@ -6,11 +6,21 @@ import sys import os import site -# add Python version specific vendor folder -site.addsitedir( - os.path.join( - os.getenv("OPENPYPE_ROOT", ""), - "vendor", "python", "python_{}".format(sys.version[0]))) +# Add Python version specific vendor folder +python_version_dir = os.path.join( + os.getenv("OPENPYPE_REPOS_ROOT", ""), + "openpype", "vendor", "python", "python_{}".format(sys.version[0]) +) +# Prepend path in sys paths +sys.path.insert(0, python_version_dir) +site.addsitedir(python_version_dir) + + +from .env_tools import ( + env_value_to_bool, + get_paths_from_environ, + get_global_environments +) from .terminal import Terminal from .execute import ( @@ -33,10 +43,11 @@ from .anatomy import ( from .config import get_datetime_data -from .env_tools import ( - env_value_to_bool, - get_paths_from_environ, - get_global_environments +from .vendor_bin_utils import ( + get_vendor_bin_path, + get_oiio_tools_path, + get_ffmpeg_tool_path, + ffprobe_streams ) from .python_module_tools import ( @@ -68,6 +79,16 @@ from .avalon_context import ( change_timer_to_current_context ) +from .local_settings import ( + IniSettingRegistry, + JSONSettingRegistry, + OpenPypeSecureRegistry, + OpenPypeSettingsRegistry, + get_local_site_id, + change_openpype_mongo_url, + get_openpype_username +) + from .applications import ( ApplicationLaunchFailed, ApplictionExecutableNotFound, @@ -92,6 +113,7 @@ from .plugin_tools import ( TaskNotSetError, get_subset_name, filter_pyblish_plugins, + set_plugin_attributes_from_settings, source_hash, get_unique_layer_name, get_background_layers, @@ -101,25 +123,12 @@ from .plugin_tools import ( should_decompress ) -from .local_settings import ( - IniSettingRegistry, - JSONSettingRegistry, - PypeSettingsRegistry, - get_local_site_id, - change_openpype_mongo_url -) - from .path_tools import ( version_up, get_version_from_path, get_last_version_from_path ) -from .ffmpeg_utils import ( - get_ffmpeg_tool_path, - ffprobe_streams -) - from .editorial import ( is_overlapping_otio_ranges, otio_range_to_frame_range, @@ -142,6 +151,11 @@ __all__ = [ "get_paths_from_environ", "get_global_environments", + "get_vendor_bin_path", + "get_oiio_tools_path", + "get_ffmpeg_tool_path", + "ffprobe_streams", + "modules_from_path", "recursive_bases_from_class", "classes_from_module", @@ -167,6 +181,14 @@ __all__ = [ "change_timer_to_current_context", + "IniSettingRegistry", + "JSONSettingRegistry", + "OpenPypeSecureRegistry", + "OpenPypeSettingsRegistry", + "get_local_site_id", + "change_openpype_mongo_url", + "get_openpype_username", + "ApplicationLaunchFailed", "ApplictionExecutableNotFound", "ApplicationNotFound", @@ -186,6 +208,7 @@ __all__ = [ "TaskNotSetError", "get_subset_name", 
"filter_pyblish_plugins", + "set_plugin_attributes_from_settings", "source_hash", "get_unique_layer_name", "get_background_layers", @@ -198,9 +221,6 @@ __all__ = [ "get_version_from_path", "get_last_version_from_path", - "ffprobe_streams", - "get_ffmpeg_tool_path", - "terminal", "merge_dict", @@ -215,12 +235,6 @@ __all__ = [ "validate_mongo_connection", "OpenPypeMongoConnection", - "IniSettingRegistry", - "JSONSettingRegistry", - "PypeSettingsRegistry", - "get_local_site_id", - "change_openpype_mongo_url", - "timeit", "is_overlapping_otio_ranges", diff --git a/openpype/lib/anatomy.py b/openpype/lib/anatomy.py index ff15aec41a..c16c6e2e99 100644 --- a/openpype/lib/anatomy.py +++ b/openpype/lib/anatomy.py @@ -216,7 +216,7 @@ class Anatomy: """Returns value of root key from template.""" root_templates = [] for group in re.findall(self.root_key_regex, template): - root_templates.append(group) + root_templates.append("{" + group + "}") if not root_templates: return None diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py index e043c9d05c..c5c192f51b 100644 --- a/openpype/lib/applications.py +++ b/openpype/lib/applications.py @@ -17,10 +17,15 @@ from openpype.settings import ( get_project_settings, get_environments ) +from openpype.settings.constants import ( + METADATA_KEYS, + M_DYNAMIC_KEY_LABEL +) from . import ( PypeLogger, Anatomy ) +from .local_settings import get_openpype_username from .avalon_context import ( get_workdir_data, get_workdir_with_workdir_data @@ -72,7 +77,7 @@ class ApplictionExecutableNotFound(Exception): for executable in application.executables: details += "\n- " + executable.executable_path - self.msg = msg.format(application.full_label, application.name) + self.msg = msg.format(application.full_label, application.full_name) self.details = details exc_mgs = str(self.msg) @@ -123,7 +128,16 @@ class ApplicationGroup: self.host_name = host_name variants = data.get("variants") or {} + key_label_mapping = variants.pop(M_DYNAMIC_KEY_LABEL, {}) for variant_name, variant_data in variants.items(): + if variant_name in METADATA_KEYS: + continue + + if "variant_label" not in variant_data: + variant_label = key_label_mapping.get(variant_name) + if variant_label: + variant_data["variant_label"] = variant_label + variants[variant_name] = Application( variant_name, variant_data, self ) @@ -165,7 +179,8 @@ class Application: enabled = False if group.enabled: enabled = data.get("enabled", True) - self.enabled = enabled + self.enabled = enabled + self.use_python_2 = data["use_python_2"] self.label = data.get("variant_label") or name self.full_name = "/".join((group.name, name)) @@ -244,18 +259,36 @@ class Application: Returns: subprocess.Popen: Return executed process as Popen object. """ - return self.manager.launch(self.name, *args, **kwargs) + return self.manager.launch(self.full_name, *args, **kwargs) class ApplicationManager: - def __init__(self): - self.log = PypeLogger().get_logger(self.__class__.__name__) + """Load applications and tools and store them by their full name. + + Args: + system_settings (dict): Preloaded system settings. When passed manager + will always use these values. Gives ability to create manager + using different settings. 
+ """ + def __init__(self, system_settings=None): + self.log = PypeLogger.get_logger(self.__class__.__name__) self.app_groups = {} self.applications = {} self.tool_groups = {} self.tools = {} + self._system_settings = system_settings + + self.refresh() + + def set_system_settings(self, system_settings): + """Ability to change init system settings. + + This will trigger refresh of manager. + """ + self._system_settings = system_settings + self.refresh() def refresh(self): @@ -265,22 +298,34 @@ class ApplicationManager: self.tool_groups.clear() self.tools.clear() - settings = get_system_settings() + if self._system_settings is not None: + settings = copy.deepcopy(self._system_settings) + else: + settings = get_system_settings( + clear_metadata=False, exclude_locals=False + ) app_defs = settings["applications"] for group_name, variant_defs in app_defs.items(): + if group_name in METADATA_KEYS: + continue + group = ApplicationGroup(group_name, variant_defs, self) self.app_groups[group_name] = group for app in group: - # TODO This should be replaced with `full_name` in future - self.applications[app.name] = app + self.applications[app.full_name] = app tools_definitions = settings["tools"]["tool_groups"] + tool_label_mapping = tools_definitions.pop(M_DYNAMIC_KEY_LABEL, {}) for tool_group_name, tool_group_data in tools_definitions.items(): - if not tool_group_name: + if not tool_group_name or tool_group_name in METADATA_KEYS: continue + + tool_group_label = ( + tool_label_mapping.get(tool_group_name) or tool_group_name + ) group = EnvironmentToolGroup( - tool_group_name, tool_group_data, self + tool_group_name, tool_group_label, tool_group_data, self ) self.tool_groups[tool_group_name] = group for tool in group: @@ -336,16 +381,24 @@ class EnvironmentToolGroup: manager (ApplicationManager): Manager that creates the group. """ - def __init__(self, name, data, manager): + def __init__(self, name, label, data, manager): self.name = name + self.label = label self._data = data self.manager = manager self._environment = data["environment"] variants = data.get("variants") or {} + label_by_key = variants.pop(M_DYNAMIC_KEY_LABEL, {}) variants_by_name = {} for variant_name, variant_env in variants.items(): - tool = EnvironmentTool(variant_name, variant_env, self) + if variant_name in METADATA_KEYS: + continue + + variant_label = label_by_key.get(variant_name) or variant_name + tool = EnvironmentTool( + variant_name, variant_label, variant_env, self + ) variants_by_name[variant_name] = tool self.variants = variants_by_name @@ -372,8 +425,10 @@ class EnvironmentTool: group (str): Name of group which wraps tool. """ - def __init__(self, name, environment, group): + def __init__(self, name, label, environment, group): self.name = name + self.variant_label = label + self.label = " ".join((group.label, label)) self.group = group self._environment = environment self.full_name = "/".join((group.name, name)) @@ -502,7 +557,7 @@ class LaunchHook: @property def app_name(self): - return getattr(self.application, "name", None) + return getattr(self.application, "full_name", None) def validate(self): """Optional validation of launch hook on initialization. @@ -804,10 +859,15 @@ class ApplicationLaunchContext: self.log.debug("All prelaunch hook executed. 
Starting new process.") # Prepare subprocess args - args = self.clear_launch_args(self.launch_args) - self.log.debug( - "Launching \"{}\" with args ({}): {}".format( - self.app_name, len(args), args + args_len_str = "" + if isinstance(self.launch_args, str): + args = self.launch_args + else: + args = self.clear_launch_args(self.launch_args) + args_len_str = " ({})".format(len(args)) + self.log.info( + "Launching \"{}\" with args{}: {}".format( + self.app_name, args_len_str, args ) ) # Run process @@ -853,7 +913,10 @@ class ApplicationLaunchContext: Return: list: Unpacked arguments. """ - while True: + if isinstance(args, str): + return args + all_cleared = False + while not all_cleared: all_cleared = True new_args = [] for arg in args: @@ -865,8 +928,6 @@ class ApplicationLaunchContext: new_args.append(arg) args = new_args - if all_cleared: - break return args @@ -939,7 +1000,7 @@ def get_app_environments_for_context( "project_name": project_name, "asset_name": asset_name, "task_name": task_name, - "app_name": app_name, + "app": app, "dbcon": dbcon, @@ -1117,8 +1178,7 @@ def prepare_context_environments(data): "AVALON_ASSET": asset_doc["name"], "AVALON_TASK": task_name, "AVALON_APP": app.host_name, - # TODO this hould be `app.full_name` in future PRs - "AVALON_APP_NAME": app.name, + "AVALON_APP_NAME": app.full_name, "AVALON_WORKDIR": workdir } log.debug( @@ -1187,7 +1247,7 @@ def _prepare_last_workfile(data, workdir): file_template = anatomy.templates["work"]["file"] workdir_data.update({ "version": 1, - "user": os.environ.get("OPENPYPE_USERNAME") or getpass.getuser(), + "user": get_openpype_username(), "ext": extensions[0] }) diff --git a/openpype/lib/avalon_context.py b/openpype/lib/avalon_context.py index 1f7c693b85..2d8726352a 100644 --- a/openpype/lib/avalon_context.py +++ b/openpype/lib/avalon_context.py @@ -1123,6 +1123,7 @@ class BuildWorkfile: return output +@with_avalon def get_creator_by_name(creator_name, case_sensitive=False): """Find creator plugin by name. 
diff --git a/openpype/lib/env_tools.py b/openpype/lib/env_tools.py index 025c13a322..ede14e00b2 100644 --- a/openpype/lib/env_tools.py +++ b/openpype/lib/env_tools.py @@ -1,5 +1,4 @@ import os -from openpype.settings import get_environments def env_value_to_bool(env_key=None, value=None, default=False): @@ -89,6 +88,7 @@ def get_global_environments(env=None): """ import acre from openpype.modules import ModulesManager + from openpype.settings import get_environments if env is None: env = {} diff --git a/openpype/lib/import_utils.py b/openpype/lib/import_utils.py index 5fca0ae5f9..4e72618803 100644 --- a/openpype/lib/import_utils.py +++ b/openpype/lib/import_utils.py @@ -8,7 +8,7 @@ log = Logger().get_logger(__name__) def discover_host_vendor_module(module_name): host = os.environ["AVALON_APP"] - pype_root = os.environ["OPENPYPE_ROOT"] + pype_root = os.environ["OPENPYPE_REPOS_ROOT"] main_module = module_name.split(".")[0] module_path = os.path.join( pype_root, "hosts", host, "vendor", main_module) diff --git a/openpype/lib/local_settings.py b/openpype/lib/local_settings.py index 82507cb0c0..67845c77cf 100644 --- a/openpype/lib/local_settings.py +++ b/openpype/lib/local_settings.py @@ -1,10 +1,13 @@ # -*- coding: utf-8 -*- """Package to deal with saving and retrieving user specific settings.""" import os +import json +import getpass +import platform from datetime import datetime from abc import ABCMeta, abstractmethod -import json +# TODO Use pype igniter logic instead of using duplicated code # disable lru cache in Python 2 try: from functools import lru_cache @@ -23,13 +26,117 @@ try: except ImportError: import ConfigParser as configparser -import platform - -import appdirs import six +import appdirs + +from openpype.settings import get_local_settings from .import validate_mongo_connection +_PLACEHOLDER = object() + + +class OpenPypeSecureRegistry: + """Store information using keyring. + + Registry should be used for private data that should be available only for + user. + + All passed registry names will have added prefix `OpenPype/` to easier + identify which data were created by OpenPype. + + Args: + name(str): Name of registry used as identifier for data. + """ + def __init__(self, name): + try: + import keyring + + except Exception: + raise NotImplementedError( + "Python module `keyring` is not available." + ) + + # hack for cx_freeze and Windows keyring backend + if platform.system().lower() == "windows": + from keyring.backends import Windows + + keyring.set_keyring(Windows.WinVaultKeyring()) + + # Force "OpenPype" prefix + self._name = "/".join(("OpenPype", name)) + + def set_item(self, name, value): + # type: (str, str) -> None + """Set sensitive item into system's keyring. + + This uses `Keyring module`_ to save sensitive stuff into system's + keyring. + + Args: + name (str): Name of the item. + value (str): Value of the item. + + .. _Keyring module: + https://github.com/jaraco/keyring + + """ + import keyring + + keyring.set_password(self._name, name, value) + + @lru_cache(maxsize=32) + def get_item(self, name, default=_PLACEHOLDER): + """Get value of sensitive item from system's keyring. + + See also `Keyring module`_ + + Args: + name (str): Name of the item. + default (Any): Default value if item is not available. + + Returns: + value (str): Value of the item. + + Raises: + ValueError: If item doesn't exist and default is not defined. + + .. 
_Keyring module: + https://github.com/jaraco/keyring + + """ + import keyring + + value = keyring.get_password(self._name, name) + if value is not None: + return value + + if default is not _PLACEHOLDER: + return default + + # NOTE Should raise `KeyError` + raise ValueError( + "Item {}:{} does not exist in keyring.".format(self._name, name) + ) + + def delete_item(self, name): + # type: (str) -> None + """Delete value stored in system's keyring. + + See also `Keyring module`_ + + Args: + name (str): Name of the item to be deleted. + + .. _Keyring module: + https://github.com/jaraco/keyring + + """ + import keyring + + self.get_item.cache_clear() + keyring.delete_password(self._name, name) + @six.add_metaclass(ABCMeta) class ASettingRegistry(): @@ -48,13 +155,6 @@ class ASettingRegistry(): # type: (str) -> ASettingRegistry super(ASettingRegistry, self).__init__() - if six.PY3: - import keyring - # hack for cx_freeze and Windows keyring backend - if platform.system() == "Windows": - from keyring.backends import Windows - keyring.set_keyring(Windows.WinVaultKeyring()) - self._name = name self._items = {} @@ -120,7 +220,7 @@ class ASettingRegistry(): """Delete item from settings. Note: - see :meth:`pype.lib.local_settings.ARegistrySettings.delete_item` + see :meth:`openpype.lib.user_settings.ARegistrySettings.delete_item` """ pass @@ -129,78 +229,6 @@ class ASettingRegistry(): del self._items[name] self._delete_item(name) - def set_secure_item(self, name, value): - # type: (str, str) -> None - """Set sensitive item into system's keyring. - - This uses `Keyring module`_ to save sensitive stuff into system's - keyring. - - Args: - name (str): Name of the item. - value (str): Value of the item. - - .. _Keyring module: - https://github.com/jaraco/keyring - - """ - if six.PY2: - raise NotImplementedError( - "Keyring not available on Python 2 hosts") - import keyring - keyring.set_password(self._name, name, value) - - @lru_cache(maxsize=32) - def get_secure_item(self, name): - # type: (str) -> str - """Get value of sensitive item from system's keyring. - - See also `Keyring module`_ - - Args: - name (str): Name of the item. - - Returns: - value (str): Value of the item. - - Raises: - ValueError: If item doesn't exist. - - .. _Keyring module: - https://github.com/jaraco/keyring - - """ - if six.PY2: - raise NotImplementedError( - "Keyring not available on Python 2 hosts") - import keyring - value = keyring.get_password(self._name, name) - if not value: - raise ValueError( - "Item {}:{} does not exist in keyring.".format( - self._name, name)) - return value - - def delete_secure_item(self, name): - # type: (str) -> None - """Delete value stored in system's keyring. - - See also `Keyring module`_ - - Args: - name (str): Name of the item to be deleted. - - .. _Keyring module: - https://github.com/jaraco/keyring - - """ - if six.PY2: - raise NotImplementedError( - "Keyring not available on Python 2 hosts") - import keyring - self.get_secure_item.cache_clear() - keyring.delete_password(self._name, name) - class IniSettingRegistry(ASettingRegistry): """Class using :mod:`configparser`. 
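The keyring-backed helpers that previously lived on ASettingRegistry (set_secure_item, get_secure_item, delete_secure_item, removed above) are now wrapped by the standalone OpenPypeSecureRegistry. A minimal usage sketch based only on the methods shown in this hunk; the "my_service" registry name and "api_key" item are hypothetical:

    from openpype.lib.local_settings import OpenPypeSecureRegistry

    # Name is force-prefixed, so data is stored under "OpenPype/my_service"
    registry = OpenPypeSecureRegistry("my_service")
    registry.set_item("api_key", "secret-value")  # written to the system keyring

    # Without a default, get_item raises ValueError for a missing item
    token = registry.get_item("api_key", None)
    if token is not None:
        registry.delete_item("api_key")  # also clears get_item's lru_cache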
@@ -218,7 +246,7 @@ class IniSettingRegistry(ASettingRegistry): if not os.path.exists(self._registry_file): with open(self._registry_file, mode="w") as cfg: print("# Settings registry", cfg) - print("# Generated by Pype {}".format(version), cfg) + print("# Generated by OpenPype {}".format(version), cfg) now = datetime.now().strftime("%d/%m/%Y %H:%M:%S") print("# {}".format(now), cfg) @@ -352,7 +380,7 @@ class IniSettingRegistry(ASettingRegistry): """Delete item from default section. Note: - See :meth:`~pype.lib.IniSettingsRegistry.delete_item_from_section` + See :meth:`~openpype.lib.IniSettingsRegistry.delete_item_from_section` """ self.delete_item_from_section("MAIN", name) @@ -369,7 +397,7 @@ class JSONSettingRegistry(ASettingRegistry): now = datetime.now().strftime("%d/%m/%Y %H:%M:%S") header = { "__metadata__": { - "pype-version": os.getenv("OPENPYPE_VERSION", "N/A"), + "openpype-version": os.getenv("OPENPYPE_VERSION", "N/A"), "generated": now }, "registry": {} @@ -387,7 +415,7 @@ class JSONSettingRegistry(ASettingRegistry): """Get item value from registry json. Note: - See :meth:`pype.lib.JSONSettingRegistry.get_item` + See :meth:`openpype.lib.JSONSettingRegistry.get_item` """ with open(self._registry_file, mode="r") as cfg: @@ -420,7 +448,7 @@ class JSONSettingRegistry(ASettingRegistry): """Set item value to registry json. Note: - See :meth:`pype.lib.JSONSettingRegistry.set_item` + See :meth:`openpype.lib.JSONSettingRegistry.set_item` """ with open(self._registry_file, "r+") as cfg: @@ -452,8 +480,8 @@ class JSONSettingRegistry(ASettingRegistry): json.dump(data, cfg, indent=4) -class PypeSettingsRegistry(JSONSettingRegistry): - """Class handling Pype general settings registry. +class OpenPypeSettingsRegistry(JSONSettingRegistry): + """Class handling OpenPype general settings registry. Attributes: vendor (str): Name used for path construction. @@ -461,21 +489,23 @@ class PypeSettingsRegistry(JSONSettingRegistry): """ - def __init__(self): + def __init__(self, name=None): self.vendor = "pypeclub" - self.product = "pype" + self.product = "openpype" + if not name: + name = "openpype_settings" path = appdirs.user_data_dir(self.product, self.vendor) - super(PypeSettingsRegistry, self).__init__("pype_settings", path) + super(OpenPypeSettingsRegistry, self).__init__(name, path) def _create_local_site_id(registry=None): """Create a local site identifier.""" - from uuid import uuid4 + from coolname import generate_slug if registry is None: - registry = PypeSettingsRegistry() + registry = OpenPypeSettingsRegistry() - new_id = str(uuid4()) + new_id = generate_slug(3) print("Created local site id \"{}\"".format(new_id)) @@ -489,7 +519,7 @@ def get_local_site_id(): Identifier is created if does not exists yet. """ - registry = PypeSettingsRegistry() + registry = OpenPypeSettingsRegistry() try: return registry.get_item("localId") except ValueError: @@ -504,5 +534,31 @@ def change_openpype_mongo_url(new_mongo_url): """ validate_mongo_connection(new_mongo_url) - registry = PypeSettingsRegistry() - registry.set_secure_item("pypeMongo", new_mongo_url) + key = "openPypeMongo" + registry = OpenPypeSecureRegistry("mongodb") + existing_value = registry.get_item(key, None) + if existing_value is not None: + registry.delete_item(key) + registry.set_item(key, new_mongo_url) + + +def get_openpype_username(): + """OpenPype username used for templates and publishing. + + May be different than machine's username. 
+
+    Always returns the "OPENPYPE_USERNAME" environment variable if it is
+    set, then tries local settings; the last option is `getpass.getuser()`,
+    which returns the machine username.
+    """
+    username = os.environ.get("OPENPYPE_USERNAME")
+    if not username:
+        local_settings = get_local_settings()
+        username = (
+            local_settings
+            .get("general", {})
+            .get("username")
+        )
+        if not username:
+            username = getpass.getuser()
+    return username
diff --git a/openpype/lib/log.py b/openpype/lib/log.py
index 9745279e28..39b6c67080 100644
--- a/openpype/lib/log.py
+++ b/openpype/lib/log.py
@@ -123,6 +123,8 @@ class PypeFormatter(logging.Formatter):
 
         if record.exc_info is not None:
             line_len = len(str(record.exc_info[1]))
+            if line_len > 30:
+                line_len = 30
 
             out = "{}\n{}\n{}\n{}\n{}".format(
                 out,
                 line_len * "=",
diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py
index eb024383d3..44c688456e 100644
--- a/openpype/lib/plugin_tools.py
+++ b/openpype/lib/plugin_tools.py
@@ -9,6 +9,7 @@ import tempfile
 
 from .execute import run_subprocess
 from .profiles_filtering import filter_profiles
+from .vendor_bin_utils import get_oiio_tools_path
 
 from openpype.settings import get_project_settings
 
@@ -127,7 +128,7 @@ def filter_pyblish_plugins(plugins):
         plugin_kind = file.split(os.path.sep)[-2:-1][0]
 
         # TODO: change after all plugins are moved one level up
-        if host_from_file == "pype":
+        if host_from_file == "openpype":
            host_from_file = "global"
 
        try:
@@ -149,6 +150,95 @@ def filter_pyblish_plugins(plugins):
            setattr(plugin, option, value)
 
 
+def set_plugin_attributes_from_settings(
+    plugins, superclass, host_name=None, project_name=None
+):
+    """Change attribute values on Avalon plugins by project settings.
+
+    This function should be used only in host context. It modifies the
+    behavior of plugins.
+
+    Args:
+        plugins (list): Plugins discovered by the original avalon discover
+            method.
+        superclass (object): Superclass of plugin type (e.g. Creator,
+            Loader).
+        host_name (str): Name of host for which plugins are loaded.
+            Value from environment `AVALON_APP` is used if not entered.
+        project_name (str): Name of project for which settings will be
+            loaded. Value from environment `AVALON_PROJECT` is used if not
+            entered.
+    """
+
+    # determine host application to use for finding presets
+    if host_name is None:
+        host_name = os.environ.get("AVALON_APP")
+
+    if project_name is None:
+        project_name = os.environ.get("AVALON_PROJECT")
+
+    # Map plugin superclass to preset json. Currently supported are load
+    # and create (avalon.api.Loader and avalon.api.Creator).
+    plugin_type = None
+    if superclass.__name__.split(".")[-1] == "Loader":
+        plugin_type = "load"
+    elif superclass.__name__.split(".")[-1] == "Creator":
+        plugin_type = "create"
+
+    if not host_name or not project_name or plugin_type is None:
+        msg = "Skipped attributes override from settings."
+        if not host_name:
+            msg += " Host name is not defined."
+
+        if not project_name:
+            msg += " Project name is not defined."
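+        # plugin_type is resolved above only for avalon Loader and Creator
+        # subclasses; for any other superclass it stays None and the
+        # override below is skipped with this message.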
+ + if plugin_type is None: + msg += " Plugin type is unsupported for class {}.".format( + superclass.__name__ + ) + + print(msg) + return + + print(">>> Finding presets for {}:{} ...".format(host_name, plugin_type)) + + project_settings = get_project_settings(project_name) + plugin_type_settings = ( + project_settings + .get(host_name, {}) + .get(plugin_type, {}) + ) + global_type_settings = ( + project_settings + .get("global", {}) + .get(plugin_type, {}) + ) + if not global_type_settings and not plugin_type_settings: + return + + for plugin in plugins: + plugin_name = plugin.__name__ + + plugin_settings = None + # Look for plugin settings in host specific settings + if plugin_name in plugin_type_settings: + plugin_settings = plugin_type_settings[plugin_name] + + # Look for plugin settings in global settings + elif plugin_name in global_type_settings: + plugin_settings = global_type_settings[plugin_name] + + if not plugin_settings: + continue + + print(">>> We have preset for {}".format(plugin_name)) + for option, value in plugin_settings.items(): + if option == "enabled" and value is False: + setattr(plugin, "active", False) + print(" - is disabled by preset") + else: + setattr(plugin, option, value) + print(" - setting `{}`: `{}`".format(option, value)) + + def source_hash(filepath, *args): """Generate simple identifier for a source file. This is used to identify whether a source file has previously been @@ -235,7 +325,7 @@ def oiio_supported(): Returns: (bool) """ - oiio_path = os.getenv("OPENPYPE_OIIO_PATH", "") + oiio_path = get_oiio_tools_path() if not oiio_path or not os.path.exists(oiio_path): log.debug("OIIOTool is not configured or not present at {}". format(oiio_path)) @@ -269,7 +359,7 @@ def decompress(target_dir, file_url, (int(input_frame_end) > int(input_frame_start)) oiio_cmd = [] - oiio_cmd.append(os.getenv("OPENPYPE_OIIO_PATH")) + oiio_cmd.append(get_oiio_tools_path()) oiio_cmd.append("--compression none") @@ -328,7 +418,7 @@ def should_decompress(file_url): """ if oiio_supported(): output = run_subprocess([ - os.getenv("OPENPYPE_OIIO_PATH"), + get_oiio_tools_path(), "--info", "-v", file_url]) return "compression: \"dwaa\"" in output or \ "compression: \"dwab\"" in output diff --git a/openpype/lib/pype_info.py b/openpype/lib/pype_info.py index 93d669eb0d..c56782be9e 100644 --- a/openpype/lib/pype_info.py +++ b/openpype/lib/pype_info.py @@ -28,7 +28,7 @@ def get_pype_info(): "version": get_pype_version(), "version_type": version_type, "executable": executable_args[-1], - "pype_root": os.environ["OPENPYPE_ROOT"], + "pype_root": os.environ["OPENPYPE_REPOS_ROOT"], "mongo_url": os.environ["OPENPYPE_MONGO"] } diff --git a/openpype/lib/ffmpeg_utils.py b/openpype/lib/vendor_bin_utils.py similarity index 50% rename from openpype/lib/ffmpeg_utils.py rename to openpype/lib/vendor_bin_utils.py index ba9f24c5d7..3b923cb608 100644 --- a/openpype/lib/ffmpeg_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -1,33 +1,60 @@ import os import logging import json +import platform import subprocess -from . import get_paths_from_environ - log = logging.getLogger("FFmpeg utils") -def get_ffmpeg_tool_path(tool="ffmpeg"): - """Find path to ffmpeg tool in FFMPEG_PATH paths. +def get_vendor_bin_path(bin_app): + """Path to OpenPype vendorized binaries. - Function looks for tool in paths set in FFMPEG_PATH environment. If tool - exists then returns it's full path. + Vendorized executables are expected in specific hierarchy inside build or + in code source. 
+ + "{OPENPYPE_ROOT}/vendor/bin/{name of vendorized app}/{platform}" Args: - tool (string): tool name + bin_app (str): Name of vendorized application. Returns: - (str): tool name itself when tool path was not found. (FFmpeg path - may be set in PATH environment variable) + str: Path to vendorized binaries folder. """ - dir_paths = get_paths_from_environ("FFMPEG_PATH") - for dir_path in dir_paths: - for file_name in os.listdir(dir_path): - base, _ext = os.path.splitext(file_name) - if base.lower() == tool.lower(): - return os.path.join(dir_path, tool) - return tool + return os.path.join( + os.environ["OPENPYPE_ROOT"], + "vendor", + "bin", + bin_app, + platform.system().lower() + ) + + +def get_oiio_tools_path(tool="oiiotool"): + """Path to vendorized OpenImageIO tool executables. + + Args: + tool (string): Tool name (oiiotool, maketx, ...). + Default is "oiiotool". + """ + oiio_dir = get_vendor_bin_path("oiio") + return os.path.join(oiio_dir, tool) + + +def get_ffmpeg_tool_path(tool="ffmpeg"): + """Path to vendorized FFmpeg executable. + + Args: + tool (string): Tool name (ffmpeg, ffprobe, ...). + Default is "ffmpeg". + + Returns: + str: Full path to ffmpeg executable. + """ + ffmpeg_dir = get_vendor_bin_path("ffmpeg") + if platform.system().lower() == "windows": + ffmpeg_dir = os.path.join(ffmpeg_dir, "bin") + return os.path.join(ffmpeg_dir, tool) def ffprobe_streams(path_to_file, logger=None): diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py index 4b120647e1..bae48c540b 100644 --- a/openpype/modules/__init__.py +++ b/openpype/modules/__init__.py @@ -18,10 +18,6 @@ from .webserver import ( WebServerModule, IWebServerRoutes ) -from .user import ( - UserModule, - IUserModule -) from .idle_manager import ( IdleManager, IIdleManager @@ -41,7 +37,7 @@ from .log_viewer import LogViewModule from .muster import MusterModule from .deadline import DeadlineModule from .standalonepublish_action import StandAlonePublishAction -from .sync_server import SyncServer +from .sync_server import SyncServerModule __all__ = ( @@ -60,9 +56,6 @@ __all__ = ( "WebServerModule", "IWebServerRoutes", - "UserModule", - "IUserModule", - "IdleManager", "IIdleManager", @@ -82,5 +75,5 @@ __all__ = ( "DeadlineModule", "StandAlonePublishAction", - "SyncServer" + "SyncServerModule" ) diff --git a/openpype/modules/clockify/clockify_api.py b/openpype/modules/clockify/clockify_api.py index d88b2ef8df..3f0a9799b4 100644 --- a/openpype/modules/clockify/clockify_api.py +++ b/openpype/modules/clockify/clockify_api.py @@ -1,13 +1,16 @@ import os import re import time -import requests import json import datetime +import requests from .constants import ( - CLOCKIFY_ENDPOINT, ADMIN_PERMISSION_NAMES, CREDENTIALS_JSON_PATH + CLOCKIFY_ENDPOINT, + ADMIN_PERMISSION_NAMES ) +from openpype.lib.local_settings import OpenPypeSecureRegistry + def time_check(obj): if obj.request_counter < 10: @@ -31,6 +34,13 @@ class ClockifyAPI: self.request_counter = 0 self.request_time = time.time() + self._secure_registry = None + + def secure_registry(self): + if self._secure_registry is None: + self._secure_registry = OpenPypeSecureRegistry("clockify") + return self._secure_registry + @property def headers(self): return {"X-Api-Key": self.api_key} @@ -129,22 +139,10 @@ class ClockifyAPI: return False def get_api_key(self): - api_key = None - try: - file = open(CREDENTIALS_JSON_PATH, 'r') - api_key = json.load(file).get('api_key', None) - if api_key == '': - api_key = None - except Exception: - file = open(CREDENTIALS_JSON_PATH, 'w') - 
file.close() - return api_key + return self.secure_registry.get_item("api_key", None) def save_api_key(self, api_key): - data = {'api_key': api_key} - file = open(CREDENTIALS_JSON_PATH, 'w') - file.write(json.dumps(data)) - file.close() + self.secure_registry.set_item("api_key", api_key) def get_workspaces(self): action_url = 'workspaces/' diff --git a/openpype/modules/clockify/constants.py b/openpype/modules/clockify/constants.py index 38ad4b64cf..66f6cb899a 100644 --- a/openpype/modules/clockify/constants.py +++ b/openpype/modules/clockify/constants.py @@ -1,17 +1,12 @@ import os -import appdirs CLOCKIFY_FTRACK_SERVER_PATH = os.path.join( - os.path.dirname(__file__), "ftrack", "server" + os.path.dirname(os.path.abspath(__file__)), "ftrack", "server" ) CLOCKIFY_FTRACK_USER_PATH = os.path.join( - os.path.dirname(__file__), "ftrack", "user" + os.path.dirname(os.path.abspath(__file__)), "ftrack", "user" ) -CREDENTIALS_JSON_PATH = os.path.normpath(os.path.join( - appdirs.user_data_dir("pype-app", "pype"), - "clockify.json" -)) ADMIN_PERMISSION_NAMES = ["WORKSPACE_OWN", "WORKSPACE_ADMIN"] CLOCKIFY_ENDPOINT = "https://api.clockify.me/api/" diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index 38a6b9b246..69159fda1a 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -64,7 +64,6 @@ class AfterEffectsSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline "AVALON_ASSET", "AVALON_TASK", "AVALON_APP_NAME", - "OPENPYPE_USERNAME", "OPENPYPE_DEV", "OPENPYPE_LOG_NO_COLORS" ] diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline..py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py similarity index 99% rename from openpype/modules/deadline/plugins/publish/submit_harmony_deadline..py rename to openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index 58379443f7..37041a84b1 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline..py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -252,7 +252,7 @@ class HarmonySubmitDeadline( def get_job_info(self): job_info = DeadlineJobInfo("Harmony") job_info.Name = self._instance.data["name"] - job_info.Plugin = "HarmonyPype" + job_info.Plugin = "HarmonyOpenPype" job_info.Frames = "{}-{}".format( self._instance.data["frameStartHandle"], self._instance.data["frameEndHandle"] @@ -273,7 +273,6 @@ class HarmonySubmitDeadline( "AVALON_ASSET", "AVALON_TASK", "AVALON_APP_NAME", - "OPENPYPE_USERNAME", "OPENPYPE_DEV", "OPENPYPE_LOG_NO_COLORS" ] diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 3aea837bb1..a5841f406c 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -47,7 +47,7 @@ payload_skeleton_template = { "BatchName": None, # Top-level group name "Name": None, # Job name, as seen in Monitor "UserName": None, - "Plugin": "MayaPype", + "Plugin": "MayaBatch", "Frames": "{start}-{end}x{step}", "Comment": None, "Priority": 50, @@ -396,7 +396,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): step=int(self._instance.data["byFrameStep"])) self.payload_skeleton["JobInfo"]["Plugin"] = self._instance.data.get( - 
"mayaRenderPlugin", "MayaPype") + "mayaRenderPlugin", "MayaBatch") self.payload_skeleton["JobInfo"]["BatchName"] = filename # Job name, as seen in Monitor @@ -441,7 +441,6 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin): "AVALON_ASSET", "AVALON_TASK", "AVALON_APP_NAME", - "OPENPYPE_USERNAME", "OPENPYPE_DEV", "OPENPYPE_LOG_NO_COLORS" ] diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index 2e30e624ef..7faa3393e5 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -31,6 +31,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): group = "" department = "" limit_groups = {} + use_gpu = False def process(self, instance): instance.data["toBeRenderedOn"] = "deadline" @@ -206,6 +207,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): # Resolve relative references "ProjectPath": script_path, "AWSAssetFile0": render_path, + + # using GPU by default + "UseGpu": self.use_gpu, + # Only the specific write node is rendered. "WriteNode": exe_node_name }, @@ -375,7 +380,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin): list: captured groups list """ captured_groups = [] - for lg_name, list_node_class in self.deadline_limit_groups.items(): + for lg_name, list_node_class in self.limit_groups.items(): for node_class in list_node_class: for node in nuke.allNodes(recurseGroups=True): # ignore all nodes not member of defined class diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index a2e21fb766..ea953441a2 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -102,7 +102,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): hosts = ["fusion", "maya", "nuke", "celaction", "aftereffects", "harmony"] - families = ["render.farm", "prerender", + families = ["render.farm", "prerender.farm", "renderlayer", "imagesequence", "vrayscene"] aov_filter = {"maya": [r".+(?:\.|_)([Bb]eauty)(?:\.|_).*"], diff --git a/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py b/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py new file mode 100644 index 0000000000..12d687bbf2 --- /dev/null +++ b/openpype/modules/ftrack/event_handlers_server/action_prepare_project.py @@ -0,0 +1,365 @@ +import json + +from openpype.api import ProjectSettings + +from openpype.modules.ftrack.lib import ( + ServerAction, + get_openpype_attr, + CUST_ATTR_AUTO_SYNC +) + + +class PrepareProjectServer(ServerAction): + """Prepare project attributes in Anatomy.""" + + identifier = "prepare.project.server" + label = "OpenPype Admin" + variant = "- Prepare Project (Server)" + description = "Set basic attributes on the project" + + settings_key = "prepare_project" + + role_list = ["Pypeclub", "Administrator", "Project Manager"] + + # Key to store info about trigerring create folder structure + item_splitter = {"type": "label", "value": "---"} + + def discover(self, session, entities, event): + """Show only on project.""" + if ( + len(entities) != 1 + or entities[0].entity_type.lower() != "project" + ): + return False + + return self.valid_roles(session, entities, event) + + def interface(self, session, entities, event): + if event['data'].get('values', {}): + return + + # Inform user that this may take 
a while
+        self.show_message(event, "Preparing data... Please wait", True)
+        self.log.debug("Preparing data which will be shown")
+
+        self.log.debug("Loading custom attributes")
+
+        project_entity = entities[0]
+        project_name = project_entity["full_name"]
+
+        try:
+            project_settings = ProjectSettings(project_name)
+        except ValueError:
+            return {
+                "message": "Project is not synchronized yet",
+                "success": False
+            }
+
+        project_anatom_settings = project_settings["project_anatomy"]
+        root_items = self.prepare_root_items(project_anatom_settings)
+
+        ca_items, multiselect_enumerators = (
+            self.prepare_custom_attribute_items(project_anatom_settings)
+        )
+
+        self.log.debug("Heavy items are ready. Preparing last items group.")
+
+        title = "Prepare Project"
+        items = []
+
+        # Add root items
+        items.extend(root_items)
+
+        items.append(self.item_splitter)
+        items.append({
+            "type": "label",
+            "value": "<h3>Set basic Attributes:</h3>"
+        })
+
+        items.extend(ca_items)
+
+        # This item will be last (before enumerators)
+        # - sets value of auto synchronization
+        auto_sync_name = "avalon_auto_sync"
+        auto_sync_value = project_entity["custom_attributes"].get(
+            CUST_ATTR_AUTO_SYNC, False
+        )
+        auto_sync_item = {
+            "name": auto_sync_name,
+            "type": "boolean",
+            "value": auto_sync_value,
+            "label": "AutoSync to Avalon"
+        }
+        # Add autosync attribute
+        items.append(auto_sync_item)
+
+        # Add enumerator items at the end
+        for item in multiselect_enumerators:
+            items.append(item)
+
+        return {
+            "items": items,
+            "title": title
+        }
+
+    def prepare_root_items(self, project_anatom_settings):
+        self.log.debug("Root items preparation begins.")
+
+        root_items = []
+        root_items.append({
+            "type": "label",
+            "value": "<h3>Check your Project root settings</h3>"
+        })
+        root_items.append({
+            "type": "label",
+            "value": (
+                "<p><i>NOTE: Roots are crucial for path filling"
+                " (and creating folder structure).</i></p>"
+            )
+        })
+        root_items.append({
+            "type": "label",
+            "value": (
+                "<p><i>WARNING: Do not change roots on running project,"
+                " that will cause workflow issues.</i></p>
" + ) + }) + + empty_text = "Enter root path here..." + + roots_entity = project_anatom_settings["roots"] + for root_name, root_entity in roots_entity.items(): + root_items.append(self.item_splitter) + root_items.append({ + "type": "label", + "value": "Root: \"{}\"".format(root_name) + }) + for platform_name, value_entity in root_entity.items(): + root_items.append({ + "label": platform_name, + "name": "__root__{}__{}".format(root_name, platform_name), + "type": "text", + "value": value_entity.value, + "empty_text": empty_text + }) + + root_items.append({ + "type": "hidden", + "name": "__rootnames__", + "value": json.dumps(list(roots_entity.keys())) + }) + + self.log.debug("Root items preparation ended.") + return root_items + + def _attributes_to_set(self, project_anatom_settings): + attributes_to_set = {} + + attribute_values_by_key = {} + for key, entity in project_anatom_settings["attributes"].items(): + attribute_values_by_key[key] = entity.value + + cust_attrs, hier_cust_attrs = get_openpype_attr(self.session, True) + + for attr in hier_cust_attrs: + key = attr["key"] + if key.startswith("avalon_"): + continue + attributes_to_set[key] = { + "label": attr["label"], + "object": attr, + "default": attribute_values_by_key.get(key) + } + + for attr in cust_attrs: + if attr["entity_type"].lower() != "show": + continue + key = attr["key"] + if key.startswith("avalon_"): + continue + attributes_to_set[key] = { + "label": attr["label"], + "object": attr, + "default": attribute_values_by_key.get(key) + } + + # Sort by label + attributes_to_set = dict(sorted( + attributes_to_set.items(), + key=lambda x: x[1]["label"] + )) + return attributes_to_set + + def prepare_custom_attribute_items(self, project_anatom_settings): + items = [] + multiselect_enumerators = [] + attributes_to_set = self._attributes_to_set(project_anatom_settings) + + self.log.debug("Preparing interface for keys: \"{}\"".format( + str([key for key in attributes_to_set]) + )) + + for key, in_data in attributes_to_set.items(): + attr = in_data["object"] + + # initial item definition + item = { + "name": key, + "label": in_data["label"] + } + + # cust attr type - may have different visualization + type_name = attr["type"]["name"].lower() + easy_types = ["text", "boolean", "date", "number"] + + easy_type = False + if type_name in easy_types: + easy_type = True + + elif type_name == "enumerator": + + attr_config = json.loads(attr["config"]) + attr_config_data = json.loads(attr_config["data"]) + + if attr_config["multiSelect"] is True: + multiselect_enumerators.append(self.item_splitter) + multiselect_enumerators.append({ + "type": "label", + "value": in_data["label"] + }) + + default = in_data["default"] + names = [] + for option in sorted( + attr_config_data, key=lambda x: x["menu"] + ): + name = option["value"] + new_name = "__{}__{}".format(key, name) + names.append(new_name) + item = { + "name": new_name, + "type": "boolean", + "label": "- {}".format(option["menu"]) + } + if default: + if isinstance(default, (list, tuple)): + if name in default: + item["value"] = True + else: + if name == default: + item["value"] = True + + multiselect_enumerators.append(item) + + multiselect_enumerators.append({ + "type": "hidden", + "name": "__hidden__{}".format(key), + "value": json.dumps(names) + }) + else: + easy_type = True + item["data"] = attr_config_data + + else: + self.log.warning(( + "Custom attribute \"{}\" has type \"{}\"." 
+ " I don't know how to handle" + ).format(key, type_name)) + items.append({ + "type": "label", + "value": ( + "!!! Can't handle Custom attritubte type \"{}\"" + " (key: \"{}\")" + ).format(type_name, key) + }) + + if easy_type: + item["type"] = type_name + + # default value in interface + default = in_data["default"] + if default is not None: + item["value"] = default + + items.append(item) + + return items, multiselect_enumerators + + def launch(self, session, entities, event): + if not event['data'].get('values', {}): + return + + in_data = event['data']['values'] + + root_values = {} + root_key = "__root__" + for key in tuple(in_data.keys()): + if key.startswith(root_key): + _key = key[len(root_key):] + root_values[_key] = in_data.pop(key) + + root_names = in_data.pop("__rootnames__", None) + root_data = {} + for root_name in json.loads(root_names): + root_data[root_name] = {} + for key, value in tuple(root_values.items()): + prefix = "{}__".format(root_name) + if not key.startswith(prefix): + continue + + _key = key[len(prefix):] + root_data[root_name][_key] = value + + # Find hidden items for multiselect enumerators + keys_to_process = [] + for key in in_data: + if key.startswith("__hidden__"): + keys_to_process.append(key) + + self.log.debug("Preparing data for Multiselect Enumerators") + enumerators = {} + for key in keys_to_process: + new_key = key.replace("__hidden__", "") + enumerator_items = in_data.pop(key) + enumerators[new_key] = json.loads(enumerator_items) + + # find values set for multiselect enumerator + for key, enumerator_items in enumerators.items(): + in_data[key] = [] + + name = "__{}__".format(key) + + for item in enumerator_items: + value = in_data.pop(item) + if value is True: + new_key = item.replace(name, "") + in_data[key].append(new_key) + + self.log.debug("Setting Custom Attribute values") + + project_name = entities[0]["full_name"] + project_settings = ProjectSettings(project_name) + project_anatomy_settings = project_settings["project_anatomy"] + project_anatomy_settings["roots"] = root_data + + custom_attribute_values = {} + attributes_entity = project_anatomy_settings["attributes"] + for key, value in in_data.items(): + if key not in attributes_entity: + custom_attribute_values[key] = value + else: + attributes_entity[key] = value + + project_settings.save() + + entity = entities[0] + for key, value in custom_attribute_values.items(): + entity["custom_attributes"][key] = value + self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value)) + + return True + + +def register(session): + '''Register plugin. 
Called when used as an plugin.''' + PrepareProjectServer(session).register() diff --git a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py index 347b227dd3..3bb01798e4 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py +++ b/openpype/modules/ftrack/event_handlers_server/event_sync_to_avalon.py @@ -18,12 +18,15 @@ from avalon import schema from avalon.api import AvalonMongoDB from openpype.modules.ftrack.lib import ( + get_openpype_attr, + CUST_ATTR_ID_KEY, + CUST_ATTR_AUTO_SYNC, + avalon_sync, + BaseEvent ) from openpype.modules.ftrack.lib.avalon_sync import ( - CUST_ATTR_ID_KEY, - CUST_ATTR_AUTO_SYNC, EntitySchemas ) @@ -125,7 +128,7 @@ class SyncToAvalonEvent(BaseEvent): @property def avalon_cust_attrs(self): if self._avalon_cust_attrs is None: - self._avalon_cust_attrs = avalon_sync.get_pype_attr( + self._avalon_cust_attrs = get_openpype_attr( self.process_session, query_keys=self.cust_attr_query_keys ) return self._avalon_cust_attrs diff --git a/openpype/modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py b/openpype/modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py index c326c56a7c..45cc9adf55 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py +++ b/openpype/modules/ftrack/event_handlers_user/action_clean_hierarchical_attributes.py @@ -1,7 +1,10 @@ import collections import ftrack_api -from openpype.modules.ftrack.lib import BaseAction, statics_icon -from openpype.modules.ftrack.lib.avalon_sync import get_pype_attr +from openpype.modules.ftrack.lib import ( + BaseAction, + statics_icon, + get_openpype_attr +) class CleanHierarchicalAttrsAction(BaseAction): @@ -52,7 +55,7 @@ class CleanHierarchicalAttrsAction(BaseAction): ) entity_ids_joined = ", ".join(all_entities_ids) - attrs, hier_attrs = get_pype_attr(session) + attrs, hier_attrs = get_openpype_attr(session) for attr in hier_attrs: configuration_key = attr["key"] diff --git a/openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py b/openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py index 0ebd221e9f..63605eda5e 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py +++ b/openpype/modules/ftrack/event_handlers_user/action_create_cust_attrs.py @@ -2,10 +2,20 @@ import collections import json import arrow import ftrack_api -from openpype.modules.ftrack.lib import BaseAction, statics_icon -from openpype.modules.ftrack.lib.avalon_sync import ( - CUST_ATTR_ID_KEY, CUST_ATTR_GROUP, default_custom_attributes_definition +from openpype.modules.ftrack.lib import ( + BaseAction, + statics_icon, + + CUST_ATTR_ID_KEY, + CUST_ATTR_GROUP, + CUST_ATTR_TOOLS, + CUST_ATTR_APPLICATIONS, + + default_custom_attributes_definition, + app_definitions_from_app_manager, + tool_definitions_from_app_manager ) + from openpype.api import get_system_settings from openpype.lib import ApplicationManager @@ -370,24 +380,12 @@ class CustomAttributes(BaseAction): exc_info=True ) - def app_defs_from_app_manager(self): - app_definitions = [] - for app_name, app in self.app_manager.applications.items(): - if app.enabled and app.is_host: - app_definitions.append({ - app_name: app.full_label - }) - - if not app_definitions: - app_definitions.append({"empty": "< Empty >"}) - return app_definitions - def applications_attribute(self, event): - apps_data = self.app_defs_from_app_manager() + 
apps_data = app_definitions_from_app_manager(self.app_manager) applications_custom_attr_data = { "label": "Applications", - "key": "applications", + "key": CUST_ATTR_APPLICATIONS, "type": "enumerator", "entity_type": "show", "group": CUST_ATTR_GROUP, @@ -399,19 +397,11 @@ class CustomAttributes(BaseAction): self.process_attr_data(applications_custom_attr_data, event) def tools_attribute(self, event): - tools_data = [] - for tool_name in self.app_manager.tools.keys(): - tools_data.append({ - tool_name: tool_name - }) - - # Make sure there is at least one item - if not tools_data: - tools_data.append({"empty": "< Empty >"}) + tools_data = tool_definitions_from_app_manager(self.app_manager) tools_custom_attr_data = { "label": "Tools", - "key": "tools_env", + "key": CUST_ATTR_TOOLS, "type": "enumerator", "is_hierarchical": True, "group": CUST_ATTR_GROUP, diff --git a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py b/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py index 7f674310fc..5298c06371 100644 --- a/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py +++ b/openpype/modules/ftrack/event_handlers_user/action_prepare_project.py @@ -1,31 +1,32 @@ -import os import json -from openpype.modules.ftrack.lib import BaseAction, statics_icon -from openpype.api import config, Anatomy -from openpype.modules.ftrack.lib.avalon_sync import get_pype_attr +from openpype.api import ProjectSettings + +from openpype.modules.ftrack.lib import ( + BaseAction, + statics_icon, + get_openpype_attr, + CUST_ATTR_AUTO_SYNC +) -class PrepareProject(BaseAction): - '''Edit meta data action.''' +class PrepareProjectLocal(BaseAction): + """Prepare project attributes in Anatomy.""" - #: Action identifier. - identifier = 'prepare.project' - #: Action label. - label = 'Prepare Project' - #: Action description. - description = 'Set basic attributes on the project' - #: roles that are allowed to register this action + identifier = "prepare.project.local" + label = "Prepare Project" + description = "Set basic attributes on the project" icon = statics_icon("ftrack", "action_icons", "PrepareProject.svg") + role_list = ["Pypeclub", "Administrator", "Project Manager"] + settings_key = "prepare_project" # Key to store info about trigerring create folder structure - create_project_structure_key = "create_folder_structure" - item_splitter = {'type': 'label', 'value': '---'} + item_splitter = {"type": "label", "value": "---"} def discover(self, session, entities, event): - ''' Validation ''' + """Show only on project.""" if ( len(entities) != 1 or entities[0].entity_type.lower() != "project" @@ -44,27 +45,22 @@ class PrepareProject(BaseAction): self.log.debug("Loading custom attributes") - project_name = entities[0]["full_name"] + project_entity = entities[0] + project_name = project_entity["full_name"] - project_defaults = ( - config.get_presets(project_name) - .get("ftrack", {}) - .get("project_defaults", {}) - ) - - anatomy = Anatomy(project_name) - if not anatomy.roots: + try: + project_settings = ProjectSettings(project_name) + except ValueError: return { - "success": False, - "message": ( - "Have issues with loading Roots for project \"{}\"." 
- ).format(anatomy.project_name) + "message": "Project is not synchronized yet", + "success": False } - root_items = self.prepare_root_items(anatomy) + project_anatom_settings = project_settings["project_anatomy"] + root_items = self.prepare_root_items(project_anatom_settings) ca_items, multiselect_enumerators = ( - self.prepare_custom_attribute_items(project_defaults) + self.prepare_custom_attribute_items(project_anatom_settings) ) self.log.debug("Heavy items are ready. Preparing last items group.") @@ -74,19 +70,6 @@ class PrepareProject(BaseAction): # Add root items items.extend(root_items) - items.append(self.item_splitter) - - # Ask if want to trigger Action Create Folder Structure - items.append({ - "type": "label", - "value": "
Want to create basic Folder Structure?
" - }) - items.append({ - "name": self.create_project_structure_key, - "type": "boolean", - "value": False, - "label": "Check if Yes" - }) items.append(self.item_splitter) items.append({ @@ -99,10 +82,13 @@ class PrepareProject(BaseAction): # This item will be last (before enumerators) # - sets value of auto synchronization auto_sync_name = "avalon_auto_sync" + auto_sync_value = project_entity["custom_attributes"].get( + CUST_ATTR_AUTO_SYNC, False + ) auto_sync_item = { "name": auto_sync_name, "type": "boolean", - "value": project_defaults.get(auto_sync_name, False), + "value": auto_sync_value, "label": "AutoSync to Avalon" } # Add autosync attribute @@ -117,13 +103,10 @@ class PrepareProject(BaseAction): "title": title } - def prepare_root_items(self, anatomy): - root_items = [] + def prepare_root_items(self, project_anatom_settings): self.log.debug("Root items preparation begins.") - root_names = anatomy.root_names() - roots = anatomy.roots - + root_items = [] root_items.append({ "type": "label", "value": "
Check your Project root settings
" @@ -143,86 +126,41 @@ class PrepareProject(BaseAction): ) }) - default_roots = anatomy.roots - while isinstance(default_roots, dict): - key = tuple(default_roots.keys())[0] - default_roots = default_roots[key] - empty_text = "Enter root path here..." - # Root names is None when anatomy templates contain "{root}" - all_platforms = ["windows", "linux", "darwin"] - if root_names is None: - root_items.append(self.item_splitter) - # find first possible key - for platform in all_platforms: - value = default_roots.raw_data.get(platform) or "" - root_items.append({ - "label": platform, - "name": "__root__{}".format(platform), - "type": "text", - "value": value, - "empty_text": empty_text - }) - return root_items - - root_name_data = {} - missing_roots = [] - for root_name in root_names: - root_name_data[root_name] = {} - if not isinstance(roots, dict): - missing_roots.append(root_name) - continue - - root_item = roots.get(root_name) - if not root_item: - missing_roots.append(root_name) - continue - - for platform in all_platforms: - root_name_data[root_name][platform] = ( - root_item.raw_data.get(platform) or "" - ) - - if missing_roots: - default_values = {} - for platform in all_platforms: - default_values[platform] = ( - default_roots.raw_data.get(platform) or "" - ) - - for root_name in missing_roots: - root_name_data[root_name] = default_values - - root_names = list(root_name_data.keys()) - root_items.append({ - "type": "hidden", - "name": "__rootnames__", - "value": json.dumps(root_names) - }) - - for root_name, values in root_name_data.items(): + roots_entity = project_anatom_settings["roots"] + for root_name, root_entity in roots_entity.items(): root_items.append(self.item_splitter) root_items.append({ "type": "label", "value": "Root: \"{}\"".format(root_name) }) - for platform, value in values.items(): + for platform_name, value_entity in root_entity.items(): root_items.append({ - "label": platform, - "name": "__root__{}{}".format(root_name, platform), + "label": platform_name, + "name": "__root__{}__{}".format(root_name, platform_name), "type": "text", - "value": value, + "value": value_entity.value, "empty_text": empty_text }) + root_items.append({ + "type": "hidden", + "name": "__rootnames__", + "value": json.dumps(list(roots_entity.keys())) + }) + self.log.debug("Root items preparation ended.") return root_items - def _attributes_to_set(self, project_defaults): + def _attributes_to_set(self, project_anatom_settings): attributes_to_set = {} - cust_attrs, hier_cust_attrs = get_pype_attr(self.session, True) + attribute_values_by_key = {} + for key, entity in project_anatom_settings["attributes"].items(): + attribute_values_by_key[key] = entity.value + + cust_attrs, hier_cust_attrs = get_openpype_attr(self.session, True) for attr in hier_cust_attrs: key = attr["key"] @@ -231,7 +169,7 @@ class PrepareProject(BaseAction): attributes_to_set[key] = { "label": attr["label"], "object": attr, - "default": project_defaults.get(key) + "default": attribute_values_by_key.get(key) } for attr in cust_attrs: @@ -243,7 +181,7 @@ class PrepareProject(BaseAction): attributes_to_set[key] = { "label": attr["label"], "object": attr, - "default": project_defaults.get(key) + "default": attribute_values_by_key.get(key) } # Sort by label @@ -253,10 +191,10 @@ class PrepareProject(BaseAction): )) return attributes_to_set - def prepare_custom_attribute_items(self, project_defaults): + def prepare_custom_attribute_items(self, project_anatom_settings): items = [] multiselect_enumerators = [] - attributes_to_set = 
self._attributes_to_set(project_defaults) + attributes_to_set = self._attributes_to_set(project_anatom_settings) self.log.debug("Preparing interface for keys: \"{}\"".format( str([key for key in attributes_to_set]) @@ -363,24 +301,15 @@ class PrepareProject(BaseAction): root_names = in_data.pop("__rootnames__", None) root_data = {} - if root_names: - for root_name in json.loads(root_names): - root_data[root_name] = {} - for key, value in tuple(root_values.items()): - if key.startswith(root_name): - _key = key[len(root_name):] - root_data[root_name][_key] = value + for root_name in json.loads(root_names): + root_data[root_name] = {} + for key, value in tuple(root_values.items()): + prefix = "{}__".format(root_name) + if not key.startswith(prefix): + continue - else: - for key, value in root_values.items(): - root_data[key] = value - - # TODO implement creating of anatomy for new projects - # project_name = entities[0]["full_name"] - # anatomy = Anatomy(project_name) - - # pop out info about creating project structure - create_proj_struct = in_data.pop(self.create_project_structure_key) + _key = key[len(prefix):] + root_data[root_name][_key] = value # Find hidden items for multiselect enumerators keys_to_process = [] @@ -407,54 +336,31 @@ class PrepareProject(BaseAction): new_key = item.replace(name, "") in_data[key].append(new_key) - self.log.debug("Setting Custom Attribute values:") - entity = entities[0] + self.log.debug("Setting Custom Attribute values") + + project_name = entities[0]["full_name"] + project_settings = ProjectSettings(project_name) + project_anatomy_settings = project_settings["project_anatomy"] + project_anatomy_settings["roots"] = root_data + + custom_attribute_values = {} + attributes_entity = project_anatomy_settings["attributes"] for key, value in in_data.items(): + if key not in attributes_entity: + custom_attribute_values[key] = value + else: + attributes_entity[key] = value + + project_settings.save() + + entity = entities[0] + for key, value in custom_attribute_values.items(): entity["custom_attributes"][key] = value self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value)) - session.commit() - - # Create project structure - self.create_project_specific_config(entities[0]["full_name"], in_data) - - # Trigger Create Project Structure action - if create_proj_struct is True: - self.trigger_action("create.project.structure", event) - return True - def create_project_specific_config(self, project_name, json_data): - self.log.debug("*** Creating project specifig configs ***") - project_specific_path = project_overrides_dir_path(project_name) - if not os.path.exists(project_specific_path): - os.makedirs(project_specific_path) - self.log.debug(( - "Project specific config folder for project \"{}\" created." 
- ).format(project_name)) - - # Presets #################################### - self.log.debug("--- Processing Presets Begins: ---") - - project_defaults_dir = os.path.normpath(os.path.join( - project_specific_path, "presets", "ftrack" - )) - project_defaults_path = os.path.normpath(os.path.join( - project_defaults_dir, "project_defaults.json" - )) - # Create folder if not exist - if not os.path.exists(project_defaults_dir): - self.log.debug("Creating Ftrack Presets folder: \"{}\"".format( - project_defaults_dir - )) - os.makedirs(project_defaults_dir) - - with open(project_defaults_path, 'w') as file_stream: - json.dump(json_data, file_stream, indent=4) - - self.log.debug("*** Creating project specifig configs Finished ***") - def register(session): '''Register plugin. Called when used as an plugin.''' - PrepareProject(session).register() + PrepareProjectLocal(session).register() diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py index 8a40cac91a..af578de86b 100644 --- a/openpype/modules/ftrack/ftrack_module.py +++ b/openpype/modules/ftrack/ftrack_module.py @@ -1,4 +1,5 @@ import os +import json import collections from abc import ABCMeta, abstractmethod import six @@ -8,10 +9,10 @@ from openpype.modules import ( ITrayModule, IPluginPaths, ITimersManager, - IUserModule, ILaunchHookPaths, ISettingsChangeListener ) +from openpype.settings import SaveWarningExc FTRACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -32,7 +33,6 @@ class FtrackModule( ITrayModule, IPluginPaths, ITimersManager, - IUserModule, ILaunchHookPaths, ISettingsChangeListener ): @@ -42,7 +42,17 @@ class FtrackModule( ftrack_settings = settings[self.name] self.enabled = ftrack_settings["enabled"] - self.ftrack_url = ftrack_settings["ftrack_server"] + # Add http schema + ftrack_url = ftrack_settings["ftrack_server"].strip("/ ") + if ftrack_url: + if "http" not in ftrack_url: + ftrack_url = "https://" + ftrack_url + + # Check if "ftrack.app" is part os url + if "ftrackapp.com" not in ftrack_url: + ftrack_url = ftrack_url + ".ftrackapp.com" + + self.ftrack_url = ftrack_url current_dir = os.path.dirname(os.path.abspath(__file__)) server_event_handlers_paths = [ @@ -113,15 +123,86 @@ class FtrackModule( if self.tray_module: self.tray_module.stop_timer_manager() - def on_pype_user_change(self, username): - """Implementation of IUserModule interface.""" - if self.tray_module: - self.tray_module.changed_user() - - def on_system_settings_save(self, *_args, **_kwargs): + def on_system_settings_save( + self, old_value, new_value, changes, new_value_metadata + ): """Implementation of ISettingsChangeListener interface.""" - # Ignore - return + try: + session = self.create_ftrack_session() + except Exception: + self.log.warning("Couldn't create ftrack session.", exc_info=True) + raise SaveWarningExc(( + "Saving of attributes to ftrack wasn't successful," + " try running Create/Update Avalon Attributes in ftrack." 
+ )) + + from .lib import ( + get_openpype_attr, + CUST_ATTR_APPLICATIONS, + CUST_ATTR_TOOLS, + app_definitions_from_app_manager, + tool_definitions_from_app_manager + ) + from openpype.api import ApplicationManager + query_keys = [ + "id", + "key", + "config" + ] + custom_attributes = get_openpype_attr( + session, + split_hierarchical=False, + query_keys=query_keys + ) + app_attribute = None + tool_attribute = None + for custom_attribute in custom_attributes: + key = custom_attribute["key"] + if key == CUST_ATTR_APPLICATIONS: + app_attribute = custom_attribute + elif key == CUST_ATTR_TOOLS: + tool_attribute = custom_attribute + + app_manager = ApplicationManager(new_value_metadata) + missing_attributes = [] + if not app_attribute: + missing_attributes.append(CUST_ATTR_APPLICATIONS) + else: + config = json.loads(app_attribute["config"]) + new_data = app_definitions_from_app_manager(app_manager) + prepared_data = [] + for item in new_data: + for key, label in item.items(): + prepared_data.append({ + "menu": label, + "value": key + }) + + config["data"] = json.dumps(prepared_data) + app_attribute["config"] = json.dumps(config) + + if not tool_attribute: + missing_attributes.append(CUST_ATTR_TOOLS) + else: + config = json.loads(tool_attribute["config"]) + new_data = tool_definitions_from_app_manager(app_manager) + prepared_data = [] + for item in new_data: + for key, label in item.items(): + prepared_data.append({ + "menu": label, + "value": key + }) + config["data"] = json.dumps(prepared_data) + tool_attribute["config"] = json.dumps(config) + + session.commit() + + if missing_attributes: + raise SaveWarningExc(( + "Couldn't find custom attribute/s ({}) to update." + " Try running Create/Update Avalon Attributes in ftrack." + ).format(", ".join(missing_attributes))) def on_project_settings_save(self, *_args, **_kwargs): """Implementation of ISettingsChangeListener interface.""" @@ -129,7 +210,7 @@ class FtrackModule( return def on_project_anatomy_save( - self, old_value, new_value, changes, project_name + self, old_value, new_value, changes, project_name, new_value_metadata ): """Implementation of ISettingsChangeListener interface.""" if not project_name: @@ -140,32 +221,49 @@ class FtrackModule( return import ftrack_api - from openpype.modules.ftrack.lib import avalon_sync + from openpype.modules.ftrack.lib import get_openpype_attr + + try: + session = self.create_ftrack_session() + except Exception: + self.log.warning("Couldn't create ftrack session.", exc_info=True) + raise SaveWarningExc(( + "Saving of attributes to ftrack wasn't successful," + " try running Create/Update Avalon Attributes in ftrack." + )) - session = self.create_ftrack_session() project_entity = session.query( "Project where full_name is \"{}\"".format(project_name) ).first() if not project_entity: - self.log.warning(( - "Ftrack project with names \"{}\" was not found." - " Skipping settings attributes change callback." - )) - return + msg = ( + "Ftrack project with name \"{}\" was not found in Ftrack." + " Can't push attribute changes." 
+ ).format(project_name) + self.log.warning(msg) + raise SaveWarningExc(msg) project_id = project_entity["id"] - cust_attr, hier_attr = avalon_sync.get_pype_attr(session) + cust_attr, hier_attr = get_openpype_attr(session) cust_attr_by_key = {attr["key"]: attr for attr in cust_attr} hier_attrs_by_key = {attr["key"]: attr for attr in hier_attr} + + failed = {} + missing = {} for key, value in attributes_changes.items(): configuration = hier_attrs_by_key.get(key) if not configuration: configuration = cust_attr_by_key.get(key) if not configuration: + self.log.warning( + "Custom attribute \"{}\" was not found.".format(key) + ) + missing[key] = value continue + # TODO add add permissions check # TODO add value validations # - value type and list items entity_key = collections.OrderedDict() @@ -179,10 +277,45 @@ class FtrackModule( "value", ftrack_api.symbol.NOT_SET, value - ) ) - session.commit() + try: + session.commit() + self.log.debug( + "Changed project custom attribute \"{}\" to \"{}\"".format( + key, value + ) + ) + except Exception: + self.log.warning( + "Failed to set \"{}\" to \"{}\"".format(key, value), + exc_info=True + ) + session.rollback() + failed[key] = value + + if not failed and not missing: + return + + error_msg = ( + "Values were not updated on Ftrack which may cause issues." + " try running Create/Update Avalon Attributes in ftrack " + " and resave project settings." + ) + if missing: + error_msg += "\nMissing Custom attributes on Ftrack: {}.".format( + ", ".join([ + '"{}"'.format(key) + for key in missing.keys() + ]) + ) + if failed: + joined_failed = ", ".join([ + '"{}": "{}"'.format(key, value) + for key, value in failed.items() + ]) + error_msg += "\nFailed to set: {}".format(joined_failed) + raise SaveWarningExc(error_msg) def create_ftrack_session(self, **session_kwargs): import ftrack_api @@ -210,3 +343,7 @@ class FtrackModule( def tray_exit(self): return self.tray_module.stop_action_server() + + def set_credentials_to_env(self, username, api_key): + os.environ["FTRACK_API_USER"] = username or "" + os.environ["FTRACK_API_KEY"] = api_key or "" diff --git a/openpype/modules/ftrack/ftrack_server/ftrack_server.py b/openpype/modules/ftrack/ftrack_server/ftrack_server.py index 285ca29dc5..bd67fba3d6 100644 --- a/openpype/modules/ftrack/ftrack_server/ftrack_server.py +++ b/openpype/modules/ftrack/ftrack_server/ftrack_server.py @@ -1,4 +1,5 @@ import os +import time import types import logging import traceback @@ -10,7 +11,6 @@ from openpype.lib import ( modules_from_path ) - log = PypeLogger.get_logger(__name__) """ @@ -120,6 +120,18 @@ class FtrackServer: if not session: session = ftrack_api.Session(auto_connect_event_hub=True) + # Wait until session has connected event hub + if session._auto_connect_event_hub_thread: + # Use timeout from session (since ftrack-api 2.1.0) + timeout = getattr(session, "request_timeout", 60) + started = time.time() + while not session.event_hub.connected: + if (time.time() - started) > timeout: + raise RuntimeError(( + "Connection to Ftrack was not created in {} seconds" + ).format(timeout)) + time.sleep(0.1) + self.session = session if load_files: if not self.handler_paths: diff --git a/openpype/modules/ftrack/ftrack_server/lib.py b/openpype/modules/ftrack/ftrack_server/lib.py index 91f3712136..88f849e765 100644 --- a/openpype/modules/ftrack/ftrack_server/lib.py +++ b/openpype/modules/ftrack/ftrack_server/lib.py @@ -3,11 +3,11 @@ import sys import logging import getpass import atexit -import tempfile import threading import datetime import 
time import queue +import appdirs import pymongo import requests @@ -165,7 +165,6 @@ class ProcessEventHub(SocketBaseEventHub): def wait(self, duration=None): """Overriden wait - Event are loaded from Mongo DB when queue is empty. Handled event is set as processed in Mongo DB. """ @@ -252,7 +251,7 @@ class CustomEventHubSession(ftrack_api.session.Session): self, server_url=None, api_key=None, api_user=None, auto_populate=True, plugin_paths=None, cache=None, cache_key_maker=None, auto_connect_event_hub=False, schema_cache_path=None, - plugin_arguments=None, **kwargs + plugin_arguments=None, timeout=60, **kwargs ): self.kwargs = kwargs @@ -331,6 +330,7 @@ class CustomEventHubSession(ftrack_api.session.Session): self._request.auth = ftrack_api.session.SessionAuthentication( self._api_key, self._api_user ) + self.request_timeout = timeout self.auto_populate = auto_populate @@ -368,8 +368,9 @@ class CustomEventHubSession(ftrack_api.session.Session): # rebuilding types)? if schema_cache_path is not False: if schema_cache_path is None: + schema_cache_path = appdirs.user_cache_dir() schema_cache_path = os.environ.get( - 'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir() + 'FTRACK_API_SCHEMA_CACHE_PATH', schema_cache_path ) schema_cache_path = os.path.join( diff --git a/openpype/modules/ftrack/launch_hooks/pre_python2_vendor.py b/openpype/modules/ftrack/launch_hooks/pre_python2_vendor.py index 7826d833ac..d34b6533fb 100644 --- a/openpype/modules/ftrack/launch_hooks/pre_python2_vendor.py +++ b/openpype/modules/ftrack/launch_hooks/pre_python2_vendor.py @@ -8,10 +8,13 @@ class PrePython2Support(PreLaunchHook): Path to vendor modules is added to the beggining of PYTHONPATH. """ - # There will be needed more granular filtering in future - app_groups = ["maya", "nuke", "nukex", "hiero", "nukestudio"] def execute(self): + if not self.application.use_python_2: + return + + self.log.info("Adding Ftrack Python 2 packages to PYTHONPATH.") + # Prepare vendor dir path python_2_vendor = os.path.join(FTRACK_MODULE_DIR, "python2_vendor") diff --git a/openpype/modules/ftrack/lib/__init__.py b/openpype/modules/ftrack/lib/__init__.py index 82b6875590..ce6d5284b6 100644 --- a/openpype/modules/ftrack/lib/__init__.py +++ b/openpype/modules/ftrack/lib/__init__.py @@ -1,7 +1,21 @@ +from .constants import ( + CUST_ATTR_ID_KEY, + CUST_ATTR_AUTO_SYNC, + CUST_ATTR_GROUP, + CUST_ATTR_TOOLS, + CUST_ATTR_APPLICATIONS +) from . settings import ( get_ftrack_url_from_settings, get_ftrack_event_mongo_info ) +from .custom_attributes import ( + default_custom_attributes_definition, + app_definitions_from_app_manager, + tool_definitions_from_app_manager, + get_openpype_attr +) + from . import avalon_sync from . 
import credentials from .ftrack_base_handler import BaseHandler @@ -10,9 +24,20 @@ from .ftrack_action_handler import BaseAction, ServerAction, statics_icon __all__ = ( + "CUST_ATTR_ID_KEY", + "CUST_ATTR_AUTO_SYNC", + "CUST_ATTR_GROUP", + "CUST_ATTR_TOOLS", + "CUST_ATTR_APPLICATIONS", + "get_ftrack_url_from_settings", "get_ftrack_event_mongo_info", + "default_custom_attributes_definition", + "app_definitions_from_app_manager", + "tool_definitions_from_app_manager", + "get_openpype_attr", + "avalon_sync", "credentials", diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py index 7511c2627b..f58e858a5a 100644 --- a/openpype/modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/ftrack/lib/avalon_sync.py @@ -14,17 +14,21 @@ else: from avalon.api import AvalonMongoDB import avalon + from openpype.api import ( Logger, Anatomy, get_anatomy_settings ) +from openpype.lib import ApplicationManager + +from .constants import CUST_ATTR_ID_KEY +from .custom_attributes import get_openpype_attr from bson.objectid import ObjectId from bson.errors import InvalidId from pymongo import UpdateOne import ftrack_api -from openpype.lib import ApplicationManager log = Logger.get_logger(__name__) @@ -36,23 +40,6 @@ EntitySchemas = { "config": "openpype:config-2.0" } -# Group name of custom attributes -CUST_ATTR_GROUP = "openpype" - -# name of Custom attribute that stores mongo_id from avalon db -CUST_ATTR_ID_KEY = "avalon_mongo_id" -CUST_ATTR_AUTO_SYNC = "avalon_auto_sync" - - -def default_custom_attributes_definition(): - json_file_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), - "custom_attributes.json" - ) - with open(json_file_path, "r") as json_stream: - data = json.load(json_stream) - return data - def check_regex(name, entity_type, in_schema=None, schema_patterns=None): schema_name = "asset-3.0" @@ -91,39 +78,6 @@ def join_query_keys(keys): return ",".join(["\"{}\"".format(key) for key in keys]) -def get_pype_attr(session, split_hierarchical=True, query_keys=None): - custom_attributes = [] - hier_custom_attributes = [] - if not query_keys: - query_keys = [ - "id", - "entity_type", - "object_type_id", - "is_hierarchical", - "default" - ] - # TODO remove deprecated "pype" group from query - cust_attrs_query = ( - "select {}" - " from CustomAttributeConfiguration" - # Kept `pype` for Backwards Compatiblity - " where group.name in (\"pype\", \"{}\")" - ).format(", ".join(query_keys), CUST_ATTR_GROUP) - all_avalon_attr = session.query(cust_attrs_query).all() - for cust_attr in all_avalon_attr: - if split_hierarchical and cust_attr["is_hierarchical"]: - hier_custom_attributes.append(cust_attr) - continue - - custom_attributes.append(cust_attr) - - if split_hierarchical: - # return tuple - return custom_attributes, hier_custom_attributes - - return custom_attributes - - def get_python_type_for_custom_attribute(cust_attr, cust_attr_type_name=None): """Python type that should value of custom attribute have. 
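# Note on the refactor above: get_pype_attr() is removed from avalon_sync and
# re-homed as get_openpype_attr() in custom_attributes.py, with the shared
# constants split into constants.py; both are re-exported from the package
# root (__init__.py above). A minimal sketch of the migrated call site,
# assuming "session" is an existing ftrack_api.Session:

# before this change:
#   from openpype.modules.ftrack.lib.avalon_sync import get_pype_attr
#   cust_attrs, hier_attrs = get_pype_attr(session)

# after this change:
from openpype.modules.ftrack.lib import get_openpype_attr, CUST_ATTR_ID_KEY

cust_attrs, hier_attrs = get_openpype_attr(session)
# e.g. pick the hierarchical attribute holding the avalon mongo id
id_attrs = [attr for attr in hier_attrs if attr["key"] == CUST_ATTR_ID_KEY]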
@@ -891,10 +845,37 @@ class SyncEntitiesFactory: self.entities_dict[parent_id]["children"].remove(id) + def _query_custom_attributes(self, session, conf_ids, entity_ids): + output = [] + # Prepare values to query + attributes_joined = join_query_keys(conf_ids) + attributes_len = len(conf_ids) + chunk_size = int(5000 / attributes_len) + for idx in range(0, len(entity_ids), chunk_size): + entity_ids_joined = join_query_keys( + entity_ids[idx:idx + chunk_size] + ) + + call_expr = [{ + "action": "query", + "expression": ( + "select value, entity_id from ContextCustomAttributeValue " + "where entity_id in ({}) and configuration_id in ({})" + ).format(entity_ids_joined, attributes_joined) + }] + if hasattr(session, "call"): + [result] = session.call(call_expr) + else: + [result] = session._call(call_expr) + + for item in result["data"]: + output.append(item) + return output + def set_cutom_attributes(self): self.log.debug("* Preparing custom attributes") # Get custom attributes and values - custom_attrs, hier_attrs = get_pype_attr( + custom_attrs, hier_attrs = get_openpype_attr( self.session, query_keys=self.cust_attr_query_keys ) ent_types = self.session.query("select id, name from ObjectType").all() @@ -1000,31 +981,13 @@ class SyncEntitiesFactory: copy.deepcopy(prepared_avalon_attr_ca_id) ) - # TODO query custom attributes by entity_id - entity_ids_joined = ", ".join([ - "\"{}\"".format(id) for id in sync_ids - ]) - attributes_joined = ", ".join([ - "\"{}\"".format(attr_id) for attr_id in attribute_key_by_id.keys() - ]) - - cust_attr_query = ( - "select value, configuration_id, entity_id" - " from ContextCustomAttributeValue" - " where entity_id in ({}) and configuration_id in ({})" + items = self._query_custom_attributes( + self.session, + list(attribute_key_by_id.keys()), + sync_ids ) - call_expr = [{ - "action": "query", - "expression": cust_attr_query.format( - entity_ids_joined, attributes_joined - ) - }] - if hasattr(self.session, "call"): - [values] = self.session.call(call_expr) - else: - [values] = self.session._call(call_expr) - for item in values["data"]: + for item in items: entity_id = item["entity_id"] attr_id = item["configuration_id"] key = attribute_key_by_id[attr_id] @@ -1106,28 +1069,14 @@ class SyncEntitiesFactory: for key, val in prepare_dict_avalon.items(): entity_dict["avalon_attrs"][key] = val - # Prepare values to query - entity_ids_joined = ", ".join([ - "\"{}\"".format(id) for id in sync_ids - ]) - attributes_joined = ", ".join([ - "\"{}\"".format(attr_id) for attr_id in attribute_key_by_id.keys() - ]) - avalon_hier = [] - call_expr = [{ - "action": "query", - "expression": ( - "select value, entity_id, configuration_id" - " from ContextCustomAttributeValue" - " where entity_id in ({}) and configuration_id in ({})" - ).format(entity_ids_joined, attributes_joined) - }] - if hasattr(self.session, "call"): - [values] = self.session.call(call_expr) - else: - [values] = self.session._call(call_expr) + items = self._query_custom_attributes( + self.session, + list(attribute_key_by_id.keys()), + sync_ids + ) - for item in values["data"]: + avalon_hier = [] + for item in items: value = item["value"] # WARNING It is not possible to propage enumerate hierachical # attributes with multiselection 100% right. 
Unseting all values @@ -1256,19 +1205,21 @@ class SyncEntitiesFactory: if not msg or not items: continue self.report_items["warning"][msg] = items - tasks = {} - for task_type in task_types: - task_type_name = task_type["name"] - # Set short name to empty string - # QUESTION Maybe better would be to lower and remove spaces - # from task type name. - tasks[task_type_name] = { - "short_name": "" - } current_project_anatomy_data = get_anatomy_settings( project_name, exclude_locals=True ) + anatomy_tasks = current_project_anatomy_data["tasks"] + tasks = {} + default_type_data = { + "short_name": "" + } + for task_type in task_types: + task_type_name = task_type["name"] + tasks[task_type_name] = copy.deepcopy( + anatomy_tasks.get(task_type_name) + or default_type_data + ) project_config = { "tasks": tasks, @@ -2511,7 +2462,7 @@ class SyncEntitiesFactory: if new_entity_id not in p_chilren: self.entities_dict[parent_id]["children"].append(new_entity_id) - cust_attr, _ = get_pype_attr(self.session) + cust_attr, _ = get_openpype_attr(self.session) for _attr in cust_attr: key = _attr["key"] if key not in av_entity["data"]: diff --git a/openpype/modules/ftrack/lib/constants.py b/openpype/modules/ftrack/lib/constants.py new file mode 100644 index 0000000000..73d5112e6d --- /dev/null +++ b/openpype/modules/ftrack/lib/constants.py @@ -0,0 +1,12 @@ +# Group name of custom attributes +CUST_ATTR_GROUP = "openpype" + +# name of Custom attribute that stores mongo_id from avalon db +CUST_ATTR_ID_KEY = "avalon_mongo_id" +# Auto sync of project +CUST_ATTR_AUTO_SYNC = "avalon_auto_sync" + +# Applications custom attribute name +CUST_ATTR_APPLICATIONS = "applications" +# Environment tools custom attribute +CUST_ATTR_TOOLS = "tools_env" diff --git a/openpype/modules/ftrack/lib/credentials.py b/openpype/modules/ftrack/lib/credentials.py index 16b1fb25fb..4e29e66382 100644 --- a/openpype/modules/ftrack/lib/credentials.py +++ b/openpype/modules/ftrack/lib/credentials.py @@ -1,28 +1,24 @@ import os -import json import ftrack_api -import appdirs -import getpass + try: from urllib.parse import urlparse except ImportError: from urlparse import urlparse -CONFIG_PATH = os.path.normpath(appdirs.user_data_dir("pype-app", "pype")) -CREDENTIALS_FILE_NAME = "ftrack_cred.json" -CREDENTIALS_PATH = os.path.join(CONFIG_PATH, CREDENTIALS_FILE_NAME) -CREDENTIALS_FOLDER = os.path.dirname(CREDENTIALS_PATH) +from openpype.lib import OpenPypeSecureRegistry -if not os.path.isdir(CREDENTIALS_FOLDER): - os.makedirs(CREDENTIALS_FOLDER) - -USER_GETTER = None +USERNAME_KEY = "username" +API_KEY_KEY = "api_key" def get_ftrack_hostname(ftrack_server=None): if not ftrack_server: - ftrack_server = os.environ["FTRACK_SERVER"] + ftrack_server = os.environ.get("FTRACK_SERVER") + + if not ftrack_server: + return None if "//" not in ftrack_server: ftrack_server = "//" + ftrack_server @@ -30,112 +26,80 @@ def get_ftrack_hostname(ftrack_server=None): return urlparse(ftrack_server).hostname -def get_user(): - if USER_GETTER: - return USER_GETTER() - return getpass.getuser() +def _get_ftrack_secure_key(hostname, key): + """Secure item key for entered hostname.""" + return "/".join(("ftrack", hostname, key)) -def get_credentials(ftrack_server=None, user=None): - credentials = {} - if not os.path.exists(CREDENTIALS_PATH): - with open(CREDENTIALS_PATH, "w") as file: - file.write(json.dumps(credentials)) - file.close() - return credentials - - with open(CREDENTIALS_PATH, "r") as file: - content = file.read() - - hostname = get_ftrack_hostname(ftrack_server) - 
if not user: - user = get_user() - - content_json = json.loads(content or "{}") - credentials = content_json.get(hostname, {}).get(user) or {} - - return credentials - - -def save_credentials(ft_user, ft_api_key, ftrack_server=None, user=None): - hostname = get_ftrack_hostname(ftrack_server) - if not user: - user = get_user() - - with open(CREDENTIALS_PATH, "r") as file: - content = file.read() - - content_json = json.loads(content or "{}") - if hostname not in content_json: - content_json[hostname] = {} - - content_json[hostname][user] = { - "username": ft_user, - "api_key": ft_api_key +def get_credentials(ftrack_server=None): + output = { + USERNAME_KEY: None, + API_KEY_KEY: None } - - # Deprecated keys - if "username" in content_json: - content_json.pop("username") - if "apiKey" in content_json: - content_json.pop("apiKey") - - with open(CREDENTIALS_PATH, "w") as file: - file.write(json.dumps(content_json, indent=4)) - - -def clear_credentials(ft_user=None, ftrack_server=None, user=None): - if not ft_user: - ft_user = os.environ.get("FTRACK_API_USER") - - if not ft_user: - return - hostname = get_ftrack_hostname(ftrack_server) - if not user: - user = get_user() + if not hostname: + return output - with open(CREDENTIALS_PATH, "r") as file: - content = file.read() + username_name = _get_ftrack_secure_key(hostname, USERNAME_KEY) + api_key_name = _get_ftrack_secure_key(hostname, API_KEY_KEY) - content_json = json.loads(content or "{}") - if hostname not in content_json: - content_json[hostname] = {} + username_registry = OpenPypeSecureRegistry(username_name) + api_key_registry = OpenPypeSecureRegistry(api_key_name) - content_json[hostname].pop(user, None) + output[USERNAME_KEY] = username_registry.get_item(USERNAME_KEY, None) + output[API_KEY_KEY] = api_key_registry.get_item(API_KEY_KEY, None) - with open(CREDENTIALS_PATH, "w") as file: - file.write(json.dumps(content_json)) + return output -def set_env(ft_user=None, ft_api_key=None): - os.environ["FTRACK_API_USER"] = ft_user or "" - os.environ["FTRACK_API_KEY"] = ft_api_key or "" +def save_credentials(username, api_key, ftrack_server=None): + hostname = get_ftrack_hostname(ftrack_server) + username_name = _get_ftrack_secure_key(hostname, USERNAME_KEY) + api_key_name = _get_ftrack_secure_key(hostname, API_KEY_KEY) + + # Clear credentials + clear_credentials(ftrack_server) + + username_registry = OpenPypeSecureRegistry(username_name) + api_key_registry = OpenPypeSecureRegistry(api_key_name) + + username_registry.set_item(USERNAME_KEY, username) + api_key_registry.set_item(API_KEY_KEY, api_key) -def get_env_credentials(): - return ( - os.environ.get("FTRACK_API_USER"), - os.environ.get("FTRACK_API_KEY") - ) +def clear_credentials(ftrack_server=None): + hostname = get_ftrack_hostname(ftrack_server) + username_name = _get_ftrack_secure_key(hostname, USERNAME_KEY) + api_key_name = _get_ftrack_secure_key(hostname, API_KEY_KEY) + + username_registry = OpenPypeSecureRegistry(username_name) + api_key_registry = OpenPypeSecureRegistry(api_key_name) + + current_username = username_registry.get_item(USERNAME_KEY, None) + current_api_key = api_key_registry.get_item(API_KEY_KEY, None) + + if current_username is not None: + username_registry.delete_item(USERNAME_KEY) + + if current_api_key is not None: + api_key_registry.delete_item(API_KEY_KEY) -def check_credentials(ft_user, ft_api_key, ftrack_server=None): +def check_credentials(username, api_key, ftrack_server=None): if not ftrack_server: - ftrack_server = os.environ["FTRACK_SERVER"] + ftrack_server = 
os.environ.get("FTRACK_SERVER") - if not ft_user or not ft_api_key: + if not ftrack_server or not username or not api_key: return False try: session = ftrack_api.Session( server_url=ftrack_server, - api_key=ft_api_key, - api_user=ft_user + api_key=api_key, + api_user=username ) session.close() except Exception: return False - return True diff --git a/openpype/modules/ftrack/lib/custom_attributes.py b/openpype/modules/ftrack/lib/custom_attributes.py new file mode 100644 index 0000000000..33eea32baa --- /dev/null +++ b/openpype/modules/ftrack/lib/custom_attributes.py @@ -0,0 +1,73 @@ +import os +import json + +from .constants import CUST_ATTR_GROUP + + +def default_custom_attributes_definition(): + json_file_path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "custom_attributes.json" + ) + with open(json_file_path, "r") as json_stream: + data = json.load(json_stream) + return data + + +def app_definitions_from_app_manager(app_manager): + app_definitions = [] + for app_name, app in app_manager.applications.items(): + if app.enabled and app.is_host: + app_definitions.append({ + app_name: app.full_label + }) + + if not app_definitions: + app_definitions.append({"empty": "< Empty >"}) + return app_definitions + + +def tool_definitions_from_app_manager(app_manager): + tools_data = [] + for tool_name, tool in app_manager.tools.items(): + tools_data.append({ + tool_name: tool.label + }) + + # Make sure there is at least one item + if not tools_data: + tools_data.append({"empty": "< Empty >"}) + return tools_data + + +def get_openpype_attr(session, split_hierarchical=True, query_keys=None): + custom_attributes = [] + hier_custom_attributes = [] + if not query_keys: + query_keys = [ + "id", + "entity_type", + "object_type_id", + "is_hierarchical", + "default" + ] + # TODO remove deprecated "pype" group from query + cust_attrs_query = ( + "select {}" + " from CustomAttributeConfiguration" + # Kept `pype` for Backwards Compatiblity + " where group.name in (\"pype\", \"{}\")" + ).format(", ".join(query_keys), CUST_ATTR_GROUP) + all_avalon_attr = session.query(cust_attrs_query).all() + for cust_attr in all_avalon_attr: + if split_hierarchical and cust_attr["is_hierarchical"]: + hier_custom_attributes.append(cust_attr) + continue + + custom_attributes.append(cust_attr) + + if split_hierarchical: + # return tuple + return custom_attributes, hier_custom_attributes + + return custom_attributes diff --git a/openpype/modules/ftrack/lib/settings.py b/openpype/modules/ftrack/lib/settings.py index f6967411db..027356edc6 100644 --- a/openpype/modules/ftrack/lib/settings.py +++ b/openpype/modules/ftrack/lib/settings.py @@ -1,6 +1,7 @@ import os from openpype.api import get_system_settings + def get_ftrack_settings(): return get_system_settings()["modules"]["ftrack"] @@ -10,7 +11,6 @@ def get_ftrack_url_from_settings(): def get_ftrack_event_mongo_info(): - ftrack_settings = get_ftrack_settings() database_name = os.environ["OPENPYPE_DATABASE_NAME"] collection_name = "ftrack_events" return database_name, collection_name diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/ftrack/tray/ftrack_tray.py index 9da5db835b..34e4646767 100644 --- a/openpype/modules/ftrack/tray/ftrack_tray.py +++ b/openpype/modules/ftrack/tray/ftrack_tray.py @@ -30,7 +30,7 @@ class FtrackTrayWrapper: self.bool_action_thread_running = False self.bool_timer_event = False - self.widget_login = login_dialog.CredentialsDialog() + self.widget_login = login_dialog.CredentialsDialog(module) 
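# Credentials storage is rewritten on top of OpenPypeSecureRegistry (the
# system keyring) and keyed per ftrack hostname, replacing the plain-text
# JSON file under appdirs. A sketch of the new round-trip, assuming
# FTRACK_SERVER is set; the username and API key values below are made up:

from openpype.modules.ftrack.lib import credentials

credentials.save_credentials("john.doe", "made-up-api-key")

creds = credentials.get_credentials()
# creds == {"username": "john.doe", "api_key": "made-up-api-key"}

if not credentials.check_credentials(creds["username"], creds["api_key"]):
    credentials.clear_credentials()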
self.widget_login.login_changed.connect(self.on_login_change) self.widget_login.logout_signal.connect(self.on_logout) @@ -56,7 +56,7 @@ class FtrackTrayWrapper: validation = credentials.check_credentials(ft_user, ft_api_key) if validation: self.widget_login.set_credentials(ft_user, ft_api_key) - credentials.set_env(ft_user, ft_api_key) + self.module.set_credentials_to_env(ft_user, ft_api_key) log.info("Connected to Ftrack successfully") self.on_login_change() @@ -289,12 +289,6 @@ class FtrackTrayWrapper: parent_menu.addMenu(tray_menu) - def tray_start(self): - self.validate() - - def tray_exit(self): - self.stop_action_server() - # Definition of visibility of each menu actions def set_menu_visibility(self): self.tray_server_menu.menuAction().setVisible(self.bool_logged) @@ -337,7 +331,7 @@ class FtrackTrayWrapper: def changed_user(self): self.stop_action_server() - credentials.set_env() + self.module.set_credentials_to_env(None, None) self.validate() def start_timer_manager(self, data): diff --git a/openpype/modules/ftrack/tray/login_dialog.py b/openpype/modules/ftrack/tray/login_dialog.py index ca409ebcaa..a6360a7380 100644 --- a/openpype/modules/ftrack/tray/login_dialog.py +++ b/openpype/modules/ftrack/tray/login_dialog.py @@ -14,11 +14,13 @@ class CredentialsDialog(QtWidgets.QDialog): login_changed = QtCore.Signal() logout_signal = QtCore.Signal() - def __init__(self, parent=None): + def __init__(self, module, parent=None): super(CredentialsDialog, self).__init__(parent) self.setWindowTitle("OpenPype - Ftrack Login") + self._module = module + self._login_server_thread = None self._is_logged = False self._in_advance_mode = False @@ -132,11 +134,11 @@ class CredentialsDialog(QtWidgets.QDialog): def fill_ftrack_url(self): url = os.getenv("FTRACK_SERVER") - if url == self.ftsite_input.text(): + checked_url = self.check_url(url) + if checked_url == self.ftsite_input.text(): return - checked_url = self.check_url(url) - self.ftsite_input.setText(checked_url or "") + self.ftsite_input.setText(checked_url or "< Not set >") enabled = bool(checked_url) @@ -145,7 +147,15 @@ class CredentialsDialog(QtWidgets.QDialog): self.api_input.setEnabled(enabled) self.user_input.setEnabled(enabled) - self.ftsite_input.setEnabled(enabled) + + if not url: + self.btn_advanced.hide() + self.btn_simple.hide() + self.btn_ftrack_login.hide() + self.btn_login.hide() + self.note_label.hide() + self.api_input.hide() + self.user_input.hide() def set_advanced_mode(self, is_advanced): self._in_advance_mode = is_advanced @@ -268,7 +278,7 @@ class CredentialsDialog(QtWidgets.QDialog): verification = credentials.check_credentials(username, api_key) if verification: credentials.save_credentials(username, api_key, False) - credentials.set_env(username, api_key) + self._module.set_credentials_to_env(username, api_key) self.set_credentials(username, api_key) self.login_changed.emit() return verification @@ -291,10 +301,9 @@ class CredentialsDialog(QtWidgets.QDialog): url = url.strip("/ ") if not url: - self.set_error(( - "You need to specify a valid server URL, " - "for example https://server-name.ftrackapp.com" - )) + self.set_error( + "Ftrack URL is not defined in settings!" 
+ ) return if "http" not in url: diff --git a/openpype/modules/idle_manager/idle_module.py b/openpype/modules/idle_manager/idle_module.py index ddccf07f6a..5dd5160aa7 100644 --- a/openpype/modules/idle_manager/idle_module.py +++ b/openpype/modules/idle_manager/idle_module.py @@ -1,3 +1,4 @@ +import platform import collections from abc import ABCMeta, abstractmethod @@ -40,8 +41,12 @@ class IdleManager(PypeModule, ITrayService): name = "idle_manager" def initialize(self, module_settings): - idle_man_settings = module_settings[self.name] - self.enabled = idle_man_settings["enabled"] + enabled = True + # Ignore on MacOs + # - pynput need root permissions and enabled access for application + if platform.system().lower() == "darwin": + enabled = False + self.enabled = enabled self.time_callbacks = collections.defaultdict(list) self.idle_thread = None @@ -50,7 +55,8 @@ class IdleManager(PypeModule, ITrayService): return def tray_start(self): - self.start_thread() + if self.time_callbacks: + self.start_thread() def tray_exit(self): self.stop_thread() diff --git a/openpype/modules/launcher_action.py b/openpype/modules/launcher_action.py index da0468d495..5ed8585b6a 100644 --- a/openpype/modules/launcher_action.py +++ b/openpype/modules/launcher_action.py @@ -22,7 +22,6 @@ class LauncherAction(PypeModule, ITrayAction): # Register actions if self.tray_initialized: from openpype.tools.launcher import actions - # actions.register_default_actions() actions.register_config_actions() actions_paths = self.manager.collect_plugin_paths()["actions"] actions.register_actions_from_paths(actions_paths) diff --git a/openpype/modules/settings_action.py b/openpype/modules/settings_action.py index 371e190c12..3f7cb8c3ba 100644 --- a/openpype/modules/settings_action.py +++ b/openpype/modules/settings_action.py @@ -16,18 +16,20 @@ class ISettingsChangeListener: } """ @abstractmethod - def on_system_settings_save(self, old_value, new_value, changes): + def on_system_settings_save( + self, old_value, new_value, changes, new_value_metadata + ): pass @abstractmethod def on_project_settings_save( - self, old_value, new_value, changes, project_name + self, old_value, new_value, changes, project_name, new_value_metadata ): pass @abstractmethod def on_project_anatomy_save( - self, old_value, new_value, changes, project_name + self, old_value, new_value, changes, project_name, new_value_metadata ): pass diff --git a/openpype/modules/sync_server/__init__.py b/openpype/modules/sync_server/__init__.py index 7123536fcf..a814f0db62 100644 --- a/openpype/modules/sync_server/__init__.py +++ b/openpype/modules/sync_server/__init__.py @@ -1,5 +1,5 @@ -from openpype.modules.sync_server.sync_server import SyncServer +from openpype.modules.sync_server.sync_server_module import SyncServerModule def tray_init(tray_widget, main_widget): - return SyncServer() + return SyncServerModule() diff --git a/openpype/modules/sync_server/providers/__init__.py b/openpype/modules/sync_server/providers/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/modules/sync_server/providers/abstract_provider.py b/openpype/modules/sync_server/providers/abstract_provider.py index 001d4c4d50..a60595ba93 100644 --- a/openpype/modules/sync_server/providers/abstract_provider.py +++ b/openpype/modules/sync_server/providers/abstract_provider.py @@ -1,16 +1,23 @@ -from abc import ABCMeta, abstractmethod +import abc +import six +from openpype.api import Logger + +log = Logger().get_logger("SyncServer") -class 
AbstractProvider(metaclass=ABCMeta): +@six.add_metaclass(abc.ABCMeta) +class AbstractProvider: - def __init__(self, site_name, tree=None, presets=None): + def __init__(self, project_name, site_name, tree=None, presets=None): self.presets = None self.active = False self.site_name = site_name self.presets = presets - @abstractmethod + super(AbstractProvider, self).__init__() + + @abc.abstractmethod def is_active(self): """ Returns True if provider is activated, eg. has working credentials. @@ -18,36 +25,54 @@ class AbstractProvider(metaclass=ABCMeta): (boolean) """ - @abstractmethod - def upload_file(self, source_path, target_path, overwrite=True): + @abc.abstractmethod + def upload_file(self, source_path, path, + server, collection, file, representation, site, + overwrite=False): """ Copy file from 'source_path' to 'target_path' on provider. Use 'overwrite' boolean to rewrite existing file on provider Args: - source_path (string): absolute path on local system - target_path (string): absolute path on provider (GDrive etc.) - overwrite (boolean): True if overwite existing + source_path (string): + path (string): absolute path with or without name of the file + overwrite (boolean): replace existing file + + arguments for saving progress: + server (SyncServer): server instance to call update_db on + collection (str): name of collection + file (dict): info about uploaded file (matches structure from db) + representation (dict): complete repre containing 'file' + site (str): site name Returns: (string) file_id of created file, raises exception """ pass - @abstractmethod - def download_file(self, source_path, local_path, overwrite=True): + @abc.abstractmethod + def download_file(self, source_path, local_path, + server, collection, file, representation, site, + overwrite=False): """ Download file from provider into local system Args: source_path (string): absolute path on provider - local_path (string): absolute path on local - overwrite (bool): default set to True + local_path (string): absolute path with or without name of the file + overwrite (boolean): replace existing file + + arguments for saving progress: + server (SyncServer): server instance to call update_db on + collection (str): name of collection + file (dict): info about uploaded file (matches structure from db) + representation (dict): complete repre containing 'file' + site (str): site name Returns: None """ pass - @abstractmethod + @abc.abstractmethod def delete_file(self, path): """ Deletes file from 'path'. Expects path to specific file. @@ -60,7 +85,7 @@ class AbstractProvider(metaclass=ABCMeta): """ pass - @abstractmethod + @abc.abstractmethod def list_folder(self, folder_path): """ List all files and subfolders of particular path non-recursively. @@ -72,7 +97,7 @@ class AbstractProvider(metaclass=ABCMeta): """ pass - @abstractmethod + @abc.abstractmethod def create_folder(self, folder_path): """ Create all nonexistent folders and subfolders in 'path'. 
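# The provider base class drops the Python-3-only `class X(metaclass=ABCMeta)`
# syntax in favour of six.add_metaclass, so the module stays importable from
# Python 2 hosts. A self-contained sketch of the pattern; the class names are
# illustrative only, not part of this diff:

import abc

import six


@six.add_metaclass(abc.ABCMeta)
class Base(object):
    @abc.abstractmethod
    def is_active(self):
        """Return True when the provider has working credentials."""


class Dummy(Base):
    def is_active(self):
        return True


Dummy().is_active()  # -> True
# Base() would raise TypeError: can't instantiate abstract class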
@@ -85,7 +110,7 @@ class AbstractProvider(metaclass=ABCMeta): """ pass - @abstractmethod + @abc.abstractmethod def get_tree(self): """ Creates folder structure for providers which do not provide @@ -94,16 +119,50 @@ class AbstractProvider(metaclass=ABCMeta): """ pass - @abstractmethod - def resolve_path(self, path, root_config, anatomy=None): + @abc.abstractmethod + def get_roots_config(self, anatomy=None): """ - Replaces root placeholders with appropriate real value from - 'root_configs' (from Settings or Local Settings) or Anatomy - (mainly for 'studio' site) + Returns root values for path resolving - Args: - path(string): path with '{root[work]}/...' - root_config(dict): from Settings or Local Settings - anatomy (Anatomy): prepared anatomy object for project + Takes value from Anatomy which takes values from Settings + overridden by Local Settings + + Returns: + (dict) - {"root": {"root": "/My Drive"}} + OR + {"root": {"root_ONE": "value", "root_TWO":"value}} + Format is importing for usage of python's format ** approach """ pass + + def resolve_path(self, path, root_config=None, anatomy=None): + """ + Replaces all root placeholders with proper values + + Args: + path(string): root[work]/folder... + root_config (dict): {'work': "c:/..."...} + anatomy (Anatomy): object of Anatomy + Returns: + (string): proper url + """ + if not root_config: + root_config = self.get_roots_config(anatomy) + + if root_config and not root_config.get("root"): + root_config = {"root": root_config} + + try: + if not root_config: + raise KeyError + + path = path.format(**root_config) + except KeyError: + try: + path = anatomy.fill_root(path) + except KeyError: + msg = "Error in resolving local root from anatomy" + log.error(msg) + raise ValueError(msg) + + return path diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py index 6c01bc4e6f..b67e5a6cfa 100644 --- a/openpype/modules/sync_server/providers/gdrive.py +++ b/openpype/modules/sync_server/providers/gdrive.py @@ -7,9 +7,10 @@ from .abstract_provider import AbstractProvider from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload from openpype.api import Logger from openpype.api import get_system_settings -from ..utils import time_function +from ..utils import time_function, ResumableError import time + SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly', 'https://www.googleapis.com/auth/drive.file', 'https://www.googleapis.com/auth/drive.readonly'] # for write|delete @@ -45,9 +46,10 @@ class GDriveHandler(AbstractProvider): MY_DRIVE_STR = 'My Drive' # name of root folder of regular Google drive CHUNK_SIZE = 2097152 # must be divisible by 256! - def __init__(self, site_name, tree=None, presets=None): + def __init__(self, project_name, site_name, tree=None, presets=None): self.presets = None self.active = False + self.project_name = project_name self.site_name = site_name self.presets = presets @@ -61,141 +63,17 @@ class GDriveHandler(AbstractProvider): return self.service = self._get_gd_service() - self.root = self._prepare_root_info() + try: + self.root = self._prepare_root_info() + except errors.HttpError: + log.warning("HttpError in sync loop, " + "trying next loop", + exc_info=True) + raise ResumableError + self._tree = tree self.active = True - def _get_gd_service(self): - """ - Authorize client with 'credentials.json', uses service account. - Service account needs to have target folder shared with. - Produces service that communicates with GDrive API. 
- - Returns: - None - """ - creds = service_account.Credentials.from_service_account_file( - self.presets["credentials_url"], - scopes=SCOPES) - service = build('drive', 'v3', - credentials=creds, cache_discovery=False) - return service - - def _prepare_root_info(self): - """ - Prepare info about roots and theirs folder ids from 'presets'. - Configuration might be for single or multiroot projects. - Regular My Drive and Shared drives are implemented, their root - folder ids need to be queried in slightly different way. - - Returns: - (dicts) of dicts where root folders are keys - """ - roots = {} - for path in self.get_roots_config().values(): - if self.MY_DRIVE_STR in path: - roots[self.MY_DRIVE_STR] = self.service.files()\ - .get(fileId='root').execute() - else: - shared_drives = [] - page_token = None - - while True: - response = self.service.drives().list( - pageSize=100, - pageToken=page_token).execute() - shared_drives.extend(response.get('drives', [])) - page_token = response.get('nextPageToken', None) - if page_token is None: - break - - folders = path.split('/') - if len(folders) < 2: - raise ValueError("Wrong root folder definition {}". - format(path)) - - for shared_drive in shared_drives: - if folders[1] in shared_drive["name"]: - roots[shared_drive["name"]] = { - "name": shared_drive["name"], - "id": shared_drive["id"]} - if self.MY_DRIVE_STR not in roots: # add My Drive always - roots[self.MY_DRIVE_STR] = self.service.files() \ - .get(fileId='root').execute() - - return roots - - @time_function - def _build_tree(self, folders): - """ - Create in-memory structure resolving paths to folder id as - recursive querying might be slower. - Initialized in the time of class initialization. - Maybe should be persisted - Tree is structure of path to id: - '/ROOT': {'id': '1234567'} - '/ROOT/PROJECT_FOLDER': {'id':'222222'} - '/ROOT/PROJECT_FOLDER/Assets': {'id': '3434545'} - Args: - folders (list): list of dictionaries with folder metadata - Returns: - (dictionary) path as a key, folder id as a value - """ - log.debug("build_tree len {}".format(len(folders))) - root_ids = [] - default_root_id = None - tree = {} - ending_by = {} - for root_name, root in self.root.items(): # might be multiple roots - if root["id"] not in root_ids: - tree["/" + root_name] = {"id": root["id"]} - ending_by[root["id"]] = "/" + root_name - root_ids.append(root["id"]) - - if self.MY_DRIVE_STR == root_name: - default_root_id = root["id"] - - no_parents_yet = {} - while folders: - folder = folders.pop(0) - parents = folder.get("parents", []) - # weird cases, shared folders, etc, parent under root - if not parents: - parent = default_root_id - else: - parent = parents[0] - - if folder["id"] in root_ids: # do not process root - continue - - if parent in ending_by: - path_key = ending_by[parent] + "/" + folder["name"] - ending_by[folder["id"]] = path_key - tree[path_key] = {"id": folder["id"]} - else: - no_parents_yet.setdefault(parent, []).append((folder["id"], - folder["name"])) - loop_cnt = 0 - # break if looped more then X times - safety against infinite loop - while no_parents_yet and loop_cnt < 20: - - keys = list(no_parents_yet.keys()) - for parent in keys: - if parent in ending_by.keys(): - subfolders = no_parents_yet.pop(parent) - for folder_id, folder_name in subfolders: - path_key = ending_by[parent] + "/" + folder_name - ending_by[folder_id] = path_key - tree[path_key] = {"id": folder_id} - loop_cnt += 1 - - if len(no_parents_yet) > 0: - log.debug("Some folders path are not resolved {}". 
- format(no_parents_yet)) - log.debug("Remove deleted folders from trash.") - - return tree - def is_active(self): """ Returns True if provider is activated, eg. has working credentials. @@ -204,6 +82,21 @@ class GDriveHandler(AbstractProvider): """ return self.active + def get_roots_config(self, anatomy=None): + """ + Returns root values for path resolving + + Use only Settings as GDrive cannot be modified by Local Settings + + Returns: + (dict) - {"root": {"root": "/My Drive"}} + OR + {"root": {"root_ONE": "value", "root_TWO":"value}} + Format is importing for usage of python's format ** approach + """ + # GDrive roots cannot be locally overridden + return self.presets['root'] + def get_tree(self): """ Building of the folder tree could be potentially expensive, @@ -217,26 +110,6 @@ class GDriveHandler(AbstractProvider): self._tree = self._build_tree(self.list_folders()) return self._tree - def get_roots_config(self): - """ - Returns value from presets of roots. It calculates with multi - roots. Config should be simple key value, or dictionary. - - Examples: - "root": "/My Drive" - OR - "root": {"root_ONE": "value", "root_TWO":"value} - Returns: - (dict) - {"root": {"root": "/My Drive"}} - OR - {"root": {"root_ONE": "value", "root_TWO":"value}} - Format is importing for usage of python's format ** approach - """ - roots = self.presets["root"] - if isinstance(roots, str): - roots = {"root": roots} - return roots - def create_folder(self, path): """ Create all nonexistent folders and subfolders in 'path'. @@ -510,20 +383,6 @@ class GDriveHandler(AbstractProvider): self.service.files().delete(fileId=file["id"], supportsAllDrives=True).execute() - def _get_folder_metadata(self, path): - """ - Get info about folder with 'path' - Args: - path (string): - - Returns: - (dictionary) with metadata or raises ValueError - """ - try: - return self.get_tree()[path] - except Exception: - raise ValueError("Uknown folder id {}".format(id)) - def list_folder(self, folder_path): """ List all files and subfolders of particular path non-recursively. @@ -678,15 +537,151 @@ class GDriveHandler(AbstractProvider): return return provider_presets - def resolve_path(self, path, root_config, anatomy=None): - if not root_config.get("root"): - root_config = {"root": root_config} + def _get_gd_service(self): + """ + Authorize client with 'credentials.json', uses service account. + Service account needs to have target folder shared with. + Produces service that communicates with GDrive API. + Returns: + None + """ + creds = service_account.Credentials.from_service_account_file( + self.presets["credentials_url"], + scopes=SCOPES) + service = build('drive', 'v3', + credentials=creds, cache_discovery=False) + return service + + def _prepare_root_info(self): + """ + Prepare info about roots and theirs folder ids from 'presets'. + Configuration might be for single or multiroot projects. + Regular My Drive and Shared drives are implemented, their root + folder ids need to be queried in slightly different way. 
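A condensed sketch of the two lookup paths the method body below takes; the preset values are hypothetical:

    roots_config = {"work": "/My Drive/OpenPype",        # regular drive
                    "pub": "/ProjectsDrive/Published"}   # shared drive
    for path in roots_config.values():
        if 'My Drive' in path:
            pass  # root id comes from service.files().get(fileId='root')
        else:
            drive_name = path.split('/')[1]  # 'ProjectsDrive', matched against
                                             # names from service.drives().list()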
+ + Returns: + (dicts) of dicts where root folders are keys + """ + roots = {} + config_roots = self.get_roots_config() + for path in config_roots.values(): + if self.MY_DRIVE_STR in path: + roots[self.MY_DRIVE_STR] = self.service.files()\ + .get(fileId='root').execute() + else: + shared_drives = [] + page_token = None + + while True: + response = self.service.drives().list( + pageSize=100, + pageToken=page_token).execute() + shared_drives.extend(response.get('drives', [])) + page_token = response.get('nextPageToken', None) + if page_token is None: + break + + folders = path.split('/') + if len(folders) < 2: + raise ValueError("Wrong root folder definition {}". + format(path)) + + for shared_drive in shared_drives: + if folders[1] in shared_drive["name"]: + roots[shared_drive["name"]] = { + "name": shared_drive["name"], + "id": shared_drive["id"]} + if self.MY_DRIVE_STR not in roots: # add My Drive always + roots[self.MY_DRIVE_STR] = self.service.files() \ + .get(fileId='root').execute() + + return roots + + @time_function + def _build_tree(self, folders): + """ + Create in-memory structure resolving paths to folder id as + recursive querying might be slower. + Initialized in the time of class initialization. + Maybe should be persisted + Tree is structure of path to id: + '/ROOT': {'id': '1234567'} + '/ROOT/PROJECT_FOLDER': {'id':'222222'} + '/ROOT/PROJECT_FOLDER/Assets': {'id': '3434545'} + Args: + folders (list): list of dictionaries with folder metadata + Returns: + (dictionary) path as a key, folder id as a value + """ + log.debug("build_tree len {}".format(len(folders))) + root_ids = [] + default_root_id = None + tree = {} + ending_by = {} + for root_name, root in self.root.items(): # might be multiple roots + if root["id"] not in root_ids: + tree["/" + root_name] = {"id": root["id"]} + ending_by[root["id"]] = "/" + root_name + root_ids.append(root["id"]) + + if self.MY_DRIVE_STR == root_name: + default_root_id = root["id"] + + no_parents_yet = {} + while folders: + folder = folders.pop(0) + parents = folder.get("parents", []) + # weird cases, shared folders, etc, parent under root + if not parents: + parent = default_root_id + else: + parent = parents[0] + + if folder["id"] in root_ids: # do not process root + continue + + if parent in ending_by: + path_key = ending_by[parent] + "/" + folder["name"] + ending_by[folder["id"]] = path_key + tree[path_key] = {"id": folder["id"]} + else: + no_parents_yet.setdefault(parent, []).append((folder["id"], + folder["name"])) + loop_cnt = 0 + # break if looped more then X times - safety against infinite loop + while no_parents_yet and loop_cnt < 20: + + keys = list(no_parents_yet.keys()) + for parent in keys: + if parent in ending_by.keys(): + subfolders = no_parents_yet.pop(parent) + for folder_id, folder_name in subfolders: + path_key = ending_by[parent] + "/" + folder_name + ending_by[folder_id] = path_key + tree[path_key] = {"id": folder_id} + loop_cnt += 1 + + if len(no_parents_yet) > 0: + log.debug("Some folders path are not resolved {}". 
+                          format(no_parents_yet))
+            log.debug("Remove deleted folders from trash.")
+
+        return tree
+
+    def _get_folder_metadata(self, path):
+        """
+            Get info about folder with 'path'
+            Args:
+                path (string):
+
+            Returns:
+                (dictionary) with metadata or raises ValueError
+        """
         try:
-            return path.format(**root_config)
-        except KeyError:
-            msg = "Error in resolving remote root, unknown key"
-            log.error(msg)
+            return self.get_tree()[path]
+        except Exception:
+            raise ValueError("Unknown folder id {}".format(path))

     def _handle_q(self, q, trashed=False):
         """ API list call contain trashed and hidden files/folder by default.
diff --git a/openpype/modules/sync_server/providers/lib.py b/openpype/modules/sync_server/providers/lib.py
index 144594ecbe..01a5d50ba5 100644
--- a/openpype/modules/sync_server/providers/lib.py
+++ b/openpype/modules/sync_server/providers/lib.py
@@ -1,4 +1,3 @@
-from enum import Enum
 from .gdrive import GDriveHandler
 from .local_drive import LocalDriveHandler

@@ -25,7 +24,8 @@ class ProviderFactory:
         """
         self.providers[provider] = (creator, batch_limit)

-    def get_provider(self, provider, site_name, tree=None, presets=None):
+    def get_provider(self, provider, project_name, site_name,
+                     tree=None, presets=None):
         """
             Returns new instance of provider client for specific site.
             One provider could have multiple sites.
@@ -37,6 +37,7 @@ class ProviderFactory:
             provider (string): 'gdrive','S3'
             site_name (string): descriptor of site, different service
                 accounts must have different site name
+            project_name (string): different projects could have different sites
             tree (dictionary): - folder paths to folder id structure
             presets (dictionary): config for provider and site
                 (eg. "credentials_url"..)
@@ -44,7 +45,8 @@ class ProviderFactory:
             (implementation of AbstractProvider)
         """
         creator_info = self._get_creator_info(provider)
-        site = creator_info[0](site_name, tree, presets)  # call init
+        # call init
+        site = creator_info[0](project_name, site_name, tree, presets)

         return site

@@ -90,4 +92,4 @@ factory = ProviderFactory()
 # 7 denotes number of files that could be synced in single loop - learned by
 # trial and error
 factory.register_provider('gdrive', GDriveHandler, 7)
-factory.register_provider('local_drive', LocalDriveHandler, 10)
+factory.register_provider('local_drive', LocalDriveHandler, 50)
diff --git a/openpype/modules/sync_server/providers/local_drive.py b/openpype/modules/sync_server/providers/local_drive.py
index fa8dd4c183..1f4fca80eb 100644
--- a/openpype/modules/sync_server/providers/local_drive.py
+++ b/openpype/modules/sync_server/providers/local_drive.py
@@ -4,7 +4,7 @@ import shutil
 import threading
 import time

-from openpype.api import Logger
+from openpype.api import Logger, Anatomy
 from .abstract_provider import AbstractProvider
 log = Logger().get_logger("SyncServer")

@@ -12,6 +12,14 @@ log = Logger().get_logger("SyncServer")
 class LocalDriveHandler(AbstractProvider):
     """ Handles required operations on mounted disks with OS """
+    def __init__(self, project_name, site_name, tree=None, presets=None):
+        self.presets = None
+        self.active = False
+        self.project_name = project_name
+        self.site_name = site_name
+
+        self.active = self.is_active()
+
     def is_active(self):
         return True

@@ -82,27 +90,37 @@ class LocalDriveHandler(AbstractProvider):
             os.makedirs(folder_path, exist_ok=True)
         return folder_path

+    def get_roots_config(self, anatomy=None):
+        """
+            Returns root values for path resolving
+
+            Takes values from Anatomy, which takes values from Settings
+            overridden by Local Settings
+
+            Returns:
+
(dict) - {"root": {"root": "/My Drive"}} + OR + {"root": {"root_ONE": "value", "root_TWO":"value}} + Format is importing for usage of python's format ** approach + """ + if not anatomy: + anatomy = Anatomy(self.project_name, + self._normalize_site_name(self.site_name)) + + return {'root': anatomy.roots} + def get_tree(self): return - def resolve_path(self, path, root_config, anatomy=None): - if root_config and not root_config.get("root"): - root_config = {"root": root_config} + def get_configurable_items_for_site(self): + """ + Returns list of items that should be configurable by User - try: - if not root_config: - raise KeyError - - path = path.format(**root_config) - except KeyError: - try: - path = anatomy.fill_root(path) - except KeyError: - msg = "Error in resolving local root from anatomy" - log.error(msg) - raise ValueError(msg) - - return path + Returns: + (list of dict) + [{key:"root", label:"root", value:"valueFromSettings"}] + """ + pass def _copy(self, source_path, target_path): print("copying {}->{}".format(source_path, target_path)) @@ -133,3 +151,9 @@ class LocalDriveHandler(AbstractProvider): ) target_file_size = os.path.getsize(target_path) time.sleep(0.5) + + def _normalize_site_name(self, site_name): + """Transform user id to 'local' for Local settings""" + if site_name != 'studio': + return 'local' + return site_name diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py index 62a5dc675c..9b305a1b2e 100644 --- a/openpype/modules/sync_server/sync_server.py +++ b/openpype/modules/sync_server/sync_server.py @@ -1,1391 +1,225 @@ -from openpype.api import ( - Anatomy, - get_project_settings, - get_local_site_id) - +"""Python 3 only implementation.""" +import os +import asyncio import threading import concurrent.futures from concurrent.futures._base import CancelledError -from enum import Enum -from datetime import datetime - from .providers import lib -import os -from bson.objectid import ObjectId - -from avalon.api import AvalonMongoDB -from .utils import time_function - -import six from openpype.lib import PypeLogger -from .. import PypeModule, ITrayModule -from .providers.local_drive import LocalDriveHandler -if six.PY2: - web = asyncio = STATIC_DIR = WebSocketAsync = None -else: - import asyncio +from .utils import SyncStatus, ResumableError + log = PypeLogger().get_logger("SyncServer") -class SyncStatus(Enum): - DO_NOTHING = 0 - DO_UPLOAD = 1 - DO_DOWNLOAD = 2 - - -class SyncServer(PypeModule, ITrayModule): +async def upload(module, collection, file, representation, provider_name, + remote_site_name, tree=None, preset=None): """ - Synchronization server that is syncing published files from local to - any of implemented providers (like GDrive, S3 etc.) - Runs in the background and checks all representations, looks for files - that are marked to be in different location than 'studio' (temporary), - checks if 'created_dt' field is present denoting successful sync - with provider destination. - Sites structure is created during publish OR by calling 'add_site' - method. + Upload single 'file' of a 'representation' to 'provider'. + Source url is taken from 'file' portion, where {root} placeholder + is replaced by 'representation.Context.root' + Provider could be one of implemented in provider.py. - By default it will always contain 1 record with - "name" == self.presets["active_site"] and - filled "created_dt" AND 1 or multiple records for all defined - remote sites, where "created_dt" is not present. 
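Put differently, a site record's sync state reduces to the presence of 'created_dt'; a minimal sketch using the field names from the docstring above:

    site = {"name": "gdrive"}                   # queued, not uploaded yet
    is_synced = bool(site.get("created_dt"))    # True only after a successful sync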
- This highlights that file should be uploaded to - remote destination + Updates MongoDB, fills in id of file from provider (ie. file_id + from GDrive), 'created_dt' - time of upload - ''' - example of synced file test_Cylinder_lookMain_v010.ma to GDrive - "files" : [ - { - "path" : "{root}/Test/Assets/Cylinder/publish/look/lookMain/v010/ - test_Cylinder_lookMain_v010.ma", - "_id" : ObjectId("5eeb25e411e06a16209ab78f"), - "hash" : "test_Cylinder_lookMain_v010,ma|1592468963,24|4822", - "size" : NumberLong(4822), - "sites" : [ - { - "name": "john_local_XD4345", - "created_dt" : ISODate("2020-05-22T08:05:44.000Z") - }, - { - "id" : ObjectId("5eeb25e411e06a16209ab78f"), - "name": "gdrive", - "created_dt" : ISODate("2020-05-55T08:54:35.833Z") - ] - } - }, - ''' - Each Tray app has assigned its own self.presets["local_id"] - used in sites as a name. - Tray is searching only for records where name matches its - self.presets["active_site"] + self.presets["remote_site"]. - "active_site" could be storage in studio ('studio'), or specific - "local_id" when user is working disconnected from home. - If the local record has its "created_dt" filled, it is a source and - process will try to upload the file to all defined remote sites. + 'provider_name' doesn't have to match to 'site_name', single + provider (GDrive) might have multiple sites ('projectA', + 'projectB') - Remote files "id" is real id that could be used in appropriate API. - Local files have "id" too, for conformity, contains just file name. - It is expected that multiple providers will be implemented in separate - classes and registered in 'providers.py'. + Args: + module(SyncServerModule): object to run SyncServerModule API + collection (str): source collection + file (dictionary): of file from representation in Mongo + representation (dictionary): of representation + provider_name (string): gdrive, gdc etc. + site_name (string): site on provider, single provider(gdrive) could + have multiple sites (different accounts, credentials) + tree (dictionary): injected memory structure for performance + preset (dictionary): site config ('credentials_url', 'root'...) """ - # limit querying DB to look for X number of representations that should - # be sync, we try to run more loops with less records - # actual number of files synced could be lower as providers can have - # different limits imposed by its API - # set 0 to no limit - REPRESENTATION_LIMIT = 100 - DEFAULT_SITE = 'studio' - LOCAL_SITE = 'local' - LOG_PROGRESS_SEC = 5 # how often log progress to DB + # create ids sequentially, upload file in parallel later + with module.lock: + # this part modifies structure on 'remote_site', only single + # thread can do that at a time, upload/download to prepared + # structure should be run in parallel + remote_handler = lib.factory.get_provider(provider_name, + collection, + remote_site_name, + tree=tree, + presets=preset) - name = "sync_server" - label = "Sync Server" - - def initialize(self, module_settings): - """ - Called during Module Manager creation. - - Collects needed data, checks asyncio presence. - Sets 'enabled' according to global settings for the module. - Shouldnt be doing any initialization, thats a job for 'tray_init' - """ - self.enabled = module_settings[self.name]["enabled"] - if asyncio is None: - raise AssertionError( - "SyncServer module requires Python 3.5 or higher." 
+ file_path = file.get("path", "") + try: + local_file_path, remote_file_path = resolve_paths(module, + file_path, collection, remote_site_name, remote_handler ) - # some parts of code need to run sequentially, not in async - self.lock = None - self.connection = None # connection to avalon DB to update state - # settings for all enabled projects for sync - self.sync_project_settings = None - self.sync_server_thread = None # asyncio requires new thread - - self.action_show_widget = None - self._paused = False - self._paused_projects = set() - self._paused_representations = set() - self._anatomies = {} - - """ Start of Public API """ - def add_site(self, collection, representation_id, site_name=None): - """ - Adds new site to representation to be synced. - - 'collection' must have synchronization enabled (globally or - project only) - - Used as a API endpoint from outside applications (Loader etc) - - Args: - collection (string): project name (must match DB) - representation_id (string): MongoDB _id value - site_name (string): name of configured and active site - - Returns: - throws ValueError if any issue - """ - if not self.get_sync_project_setting(collection): - raise ValueError("Project not configured") - - if not site_name: - site_name = self.DEFAULT_SITE - - self.reset_provider_for_file(collection, - representation_id, - site_name=site_name) - - # public facing API - def remove_site(self, collection, representation_id, site_name, - remove_local_files=False): - """ - Removes 'site_name' for particular 'representation_id' on - 'collection' - - Args: - collection (string): project name (must match DB) - representation_id (string): MongoDB _id value - site_name (string): name of configured and active site - remove_local_files (bool): remove only files for 'local_id' - site - - Returns: - throws ValueError if any issue - """ - if not self.get_sync_project_setting(collection): - raise ValueError("Project not configured") - - self.reset_provider_for_file(collection, - representation_id, - site_name=site_name, - remove=True) - if remove_local_files: - self._remove_local_file(collection, representation_id, site_name) - - def clear_project(self, collection, site_name): - """ - Clear 'collection' of 'site_name' and its local files - - Works only on real local sites, not on 'studio' - """ - query = { - "type": "representation", - "files.sites.name": site_name - } - - representations = list( - self.connection.database[collection].find(query)) - if not representations: - self.log.debug("No repre found") - return - - for repre in representations: - self.remove_site(collection, repre.get("_id"), site_name, True) - - def pause_representation(self, collection, representation_id, site_name): - """ - Sets 'representation_id' as paused, eg. no syncing should be - happening on it. - - Args: - collection (string): project name - representation_id (string): MongoDB objectId value - site_name (string): 'gdrive', 'studio' etc. - """ - log.info("Pausing SyncServer for {}".format(representation_id)) - self._paused_representations.add(representation_id) - self.reset_provider_for_file(collection, representation_id, - site_name=site_name, pause=True) - - def unpause_representation(self, collection, representation_id, site_name): - """ - Sets 'representation_id' as unpaused. - - Does not fail or warn if repre wasn't paused. - - Args: - collection (string): project name - representation_id (string): MongoDB objectId value - site_name (string): 'gdrive', 'studio' etc. 
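A hypothetical caller toggling sync for a single representation through the pause API above (project name and id are illustrative):

    server.pause_representation("ProjectA", "5eeb25e411e06a16209ab78f", "gdrive")
    # ... maintenance, manual file moves etc. ...
    server.unpause_representation("ProjectA", "5eeb25e411e06a16209ab78f", "gdrive")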
- """ - log.info("Unpausing SyncServer for {}".format(representation_id)) - try: - self._paused_representations.remove(representation_id) - except KeyError: - pass - # self.paused_representations is not persistent - self.reset_provider_for_file(collection, representation_id, - site_name=site_name, pause=False) - - def is_representation_paused(self, representation_id, - check_parents=False, project_name=None): - """ - Returns if 'representation_id' is paused or not. - - Args: - representation_id (string): MongoDB objectId value - check_parents (bool): check if parent project or server itself - are not paused - project_name (string): project to check if paused - - if 'check_parents', 'project_name' should be set too - Returns: - (bool) - """ - condition = representation_id in self._paused_representations - if check_parents and project_name: - condition = condition or \ - self.is_project_paused(project_name) or \ - self.is_paused() - return condition - - def pause_project(self, project_name): - """ - Sets 'project_name' as paused, eg. no syncing should be - happening on all representation inside. - - Args: - project_name (string): collection name - """ - log.info("Pausing SyncServer for {}".format(project_name)) - self._paused_projects.add(project_name) - - def unpause_project(self, project_name): - """ - Sets 'project_name' as unpaused - - Does not fail or warn if project wasn't paused. - - Args: - project_name (string): collection name - """ - log.info("Unpausing SyncServer for {}".format(project_name)) - try: - self._paused_projects.remove(project_name) - except KeyError: - pass - - def is_project_paused(self, project_name, check_parents=False): - """ - Returns if 'project_name' is paused or not. - - Args: - project_name (string): collection name - check_parents (bool): check if server itself - is not paused - Returns: - (bool) - """ - condition = project_name in self._paused_projects - if check_parents: - condition = condition or self.is_paused() - return condition - - def pause_server(self): - """ - Pause sync server - - It won't check anything, not uploading/downloading... - """ - log.info("Pausing SyncServer") - self._paused = True - - def unpause_server(self): - """ - Unpause server - """ - log.info("Unpausing SyncServer") - self._paused = False - - def is_paused(self): - """ Is server paused """ - return self._paused - - def get_active_sites(self, project_name): - """ - Returns list of active sites for 'project_name'. - - By default it returns ['studio'], this site is default - and always present even if SyncServer is not enabled. (for publish) - - Used mainly for Local settings for user override. - - Args: - project_name (string): - - Returns: - (list) of strings - """ - return self.get_active_sites_from_settings( - get_project_settings(project_name)) - - def get_active_sites_from_settings(self, settings): - """ - List available active sites from incoming 'settings'. 
Used for - returning 'default' values for Local Settings - - Args: - settings (dict): full settings (global + project) - Returns: - (list) of strings - """ - sync_settings = self._parse_sync_settings_from_settings(settings) - - return self._get_active_sites_from_settings(sync_settings) - - def get_active_site(self, project_name): - """ - Returns active (mine) site for 'project_name' from settings - - Returns: - (string) - """ - active_site = self.get_sync_project_setting( - project_name)['config']['active_site'] - if active_site == self.LOCAL_SITE: - return get_local_site_id() - return active_site - - # remote sites - def get_remote_sites(self, project_name): - """ - Returns all remote sites configured on 'project_name'. - - If 'project_name' is not enabled for syncing returns []. - - Used by Local setting to allow user choose remote site. - - Args: - project_name (string): - - Returns: - (list) of strings - """ - return self.get_remote_sites_from_settings( - get_project_settings(project_name)) - - def get_remote_sites_from_settings(self, settings): - """ - Get remote sites for returning 'default' values for Local Settings - """ - sync_settings = self._parse_sync_settings_from_settings(settings) - - return self._get_remote_sites_from_settings(sync_settings) - - def get_remote_site(self, project_name): - """ - Returns remote (theirs) site for 'project_name' from settings - """ - remote_site = self.get_sync_project_setting( - project_name)['config']['remote_site'] - if remote_site == self.LOCAL_SITE: - return get_local_site_id() - - return remote_site - - """ End of Public API """ - - def get_local_file_path(self, collection, file_path): - """ - Externalized for app - """ - local_file_path, _ = self._resolve_paths(file_path, collection) - - return local_file_path - - def _get_remote_sites_from_settings(self, sync_settings): - if not self.enabled or not sync_settings['enabled']: - return [] - - remote_sites = [self.DEFAULT_SITE, self.LOCAL_SITE] - if sync_settings: - remote_sites.extend(sync_settings.get("sites").keys()) - - return list(set(remote_sites)) - - def _get_active_sites_from_settings(self, sync_settings): - sites = [self.DEFAULT_SITE] - if self.enabled and sync_settings['enabled']: - sites.append(self.LOCAL_SITE) - - return sites - - def connect_with_modules(self, *_a, **kw): - return - - def tray_init(self): - """ - Actual initialization of Sync Server. - - Called when tray is initialized, it checks if module should be - enabled. If not, no initialization necessary. - """ - if not self.enabled: - return - - self.sync_project_settings = None - self.lock = threading.Lock() - - self.connection = AvalonMongoDB() - self.connection.install() - - try: - self.set_sync_project_settings() - self.sync_server_thread = SyncServerThread(self) - from .tray.app import SyncServerWindow - self.widget = SyncServerWindow(self) - except ValueError: - log.info("No system setting for sync. Not syncing.", exc_info=True) - self.enabled = False - except KeyError: - log.info(( - "There are not set presets for SyncServer OR " - "Credentials provided are invalid, " - "no syncing possible"). - format(str(self.sync_project_settings)), exc_info=True) - self.enabled = False - - def tray_start(self): - """ - Triggered when Tray is started. - - Checks if configuration presets are available and if there is - any provider ('gdrive', 'S3') that is activated - (eg. has valid credentials). 
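The 'local' alias resolution used by get_active_site and get_remote_site above can be sketched as:

    remote_site = sync_settings['config']['remote_site']   # may be the alias 'local'
    if remote_site == 'local':                             # the LOCAL_SITE constant
        remote_site = get_local_site_id()                  # e.g. 'john_local_XD4345'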
+ except Exception as exp: + print(exp) + + target_folder = os.path.dirname(remote_file_path) + folder_id = remote_handler.create_folder(target_folder) + + if not folder_id: + err = "Folder {} wasn't created. Check permissions.". \ + format(target_folder) + raise NotADirectoryError(err) + + loop = asyncio.get_running_loop() + file_id = await loop.run_in_executor(None, + remote_handler.upload_file, + local_file_path, + remote_file_path, + module, + collection, + file, + representation, + remote_site_name, + True + ) + return file_id + + +async def download(module, collection, file, representation, provider_name, + remote_site_name, tree=None, preset=None): + """ + Downloads file to local folder denoted in representation.Context. + + Args: + module(SyncServerModule): object to run SyncServerModule API + collection (str): source collection + file (dictionary) : info about processed file + representation (dictionary): repr that 'file' belongs to + provider_name (string): 'gdrive' etc + site_name (string): site on provider, single provider(gdrive) could + have multiple sites (different accounts, credentials) + tree (dictionary): injected memory structure for performance + preset (dictionary): site config ('credentials_url', 'root'...) Returns: - None - """ - if self.sync_project_settings and self.enabled: - self.sync_server_thread.start() - else: - log.info("No presets or active providers. " + - "Synchronization not possible.") + (string) - 'name' of local file + """ + with module.lock: + remote_handler = lib.factory.get_provider(provider_name, + collection, + remote_site_name, + tree=tree, + presets=preset) - def tray_exit(self): - """ - Stops sync thread if running. + file_path = file.get("path", "") + local_file_path, remote_file_path = resolve_paths( + module, file_path, collection, remote_site_name, remote_handler + ) - Called from Module Manager - """ - if not self.sync_server_thread: - return + local_folder = os.path.dirname(local_file_path) + os.makedirs(local_folder, exist_ok=True) - if not self.is_running: - return - try: - log.info("Stopping sync server server") - self.sync_server_thread.is_running = False - self.sync_server_thread.stop() - except Exception: - log.warning( - "Error has happened during Killing sync server", - exc_info=True - ) + local_site = module.get_active_site(collection) - def tray_menu(self, parent_menu): - if not self.enabled: - return + loop = asyncio.get_running_loop() + file_id = await loop.run_in_executor(None, + remote_handler.download_file, + remote_file_path, + local_file_path, + module, + collection, + file, + representation, + local_site, + True + ) + return file_id - from Qt import QtWidgets - """Add menu or action to Tray(or parent)'s menu""" - action = QtWidgets.QAction("SyncServer", parent_menu) - action.triggered.connect(self.show_widget) - parent_menu.addAction(action) - parent_menu.addSeparator() - self.action_show_widget = action +def resolve_paths(module, file_path, collection, + remote_site_name=None, remote_handler=None): + """ + Returns tuple of local and remote file paths with {root} + placeholders replaced with proper values from Settings or Anatomy - @property - def is_running(self): - return self.sync_server_thread.is_running + Ejected here because of Python 2 hosts (GDriveHandler is an issue) - def get_anatomy(self, project_name): - """ - Get already created or newly created anatomy for project - - Args: - project_name (string): - - Return: - (Anatomy) - """ - return self._anatomies.get('project_name') or Anatomy(project_name) - - def 
set_sync_project_settings(self): - """ - Set sync_project_settings for all projects (caching) - - For performance - """ - sync_project_settings = {} - if not self.connection: - self.connection = AvalonMongoDB() - self.connection.install() - - for collection in self.connection.database.collection_names(False): - sync_settings = self._parse_sync_settings_from_settings( - get_project_settings(collection)) - if sync_settings: - default_sites = self._get_default_site_configs() - sync_settings['sites'].update(default_sites) - sync_project_settings[collection] = sync_settings - - if not sync_project_settings: - log.info("No enabled and configured projects for sync.") - - self.sync_project_settings = sync_project_settings - - def get_sync_project_settings(self, refresh=False): - """ - Collects all projects which have enabled syncing and their settings Args: - refresh (bool): refresh presets from settings - used when user - changes site in Local Settings or any time up-to-date values - are necessary + module(SyncServerModule): object to run SyncServerModule API + file_path(string): path with {root} + collection(string): project name + remote_site_name(string): remote site + remote_handler(AbstractProvider): implementation Returns: - (dict): of settings, keys are project names - {'projectA':{enabled: True, sites:{}...} - """ - # presets set already, do not call again and again - if refresh or not self.sync_project_settings: - self.set_sync_project_settings() + (string, string) - proper absolute paths, remote path is optional + """ + remote_file_path = '' + if remote_handler: + remote_file_path = remote_handler.resolve_path(file_path) - return self.sync_project_settings + local_handler = lib.factory.get_provider( + 'local_drive', collection, module.get_active_site(collection)) + local_file_path = local_handler.resolve_path(file_path) - def get_sync_project_setting(self, project_name): - """ Handles pulling sync_server's settings for enabled 'project_name' + return local_file_path, remote_file_path - Args: - project_name (str): used in project settings - Returns: - (dict): settings dictionary for the enabled project, - empty if no settings or sync is disabled - """ - # presets set already, do not call again and again - # self.log.debug("project preset {}".format(self.presets)) - if self.sync_project_settings and \ - self.sync_project_settings.get(project_name): - return self.sync_project_settings.get(project_name) - settings = get_project_settings(project_name) - return self._parse_sync_settings_from_settings(settings) +def site_is_working(module, project_name, site_name): + """ + Confirm that 'site_name' is configured correctly for 'project_name'. - def site_is_working(self, project_name, site_name): - """ - Confirm that 'site_name' is configured correctly for 'project_name' - Args: - project_name(string): - site_name(string): - Returns - (bool) - """ - if self._get_configured_sites(project_name).get(site_name): - return True - return False + Must be here as lib.factory access doesn't work in Python 2 hosts. 
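With the signature change in lib.py, provider lookups now pass the project before the site; a sketch with hypothetical names:

    handler = lib.factory.get_provider('gdrive', 'ProjectA', 'studio',
                                       presets=site_config)
    if handler.is_active():
        pass  # site has working credentials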
- def _parse_sync_settings_from_settings(self, settings): - """ settings from api.get_project_settings, TOOD rename """ - sync_settings = settings.get("global").get("sync_server") - if not sync_settings: - log.info("No project setting not syncing.") - return {} - if sync_settings.get("enabled"): - return sync_settings + Args: + module (SyncServerModule) + project_name(string): + site_name(string): + Returns + (bool) + """ + if _get_configured_sites(module, project_name).get(site_name): + return True + return False + +def _get_configured_sites(module, project_name): + """ + Loops through settings and looks for configured sites and checks + its handlers for particular 'project_name'. + + Args: + project_setting(dict): dictionary from Settings + only_project_name(string, optional): only interested in + particular project + Returns: + (dict of dict) + {'ProjectA': {'studio':True, 'gdrive':False}} + """ + settings = module.get_sync_project_setting(project_name) + return _get_configured_sites_from_setting(module, project_name, settings) + + +def _get_configured_sites_from_setting(module, project_name, project_setting): + if not project_setting.get("enabled"): return {} - def _get_configured_sites(self, project_name): - """ - Loops through settings and looks for configured sites and checks - its handlers for particular 'project_name'. - - Args: - project_setting(dict): dictionary from Settings - only_project_name(string, optional): only interested in - particular project - Returns: - (dict of dict) - {'ProjectA': {'studio':True, 'gdrive':False}} - """ - settings = self.get_sync_project_setting(project_name) - return self._get_configured_sites_from_setting(settings) - - def _get_configured_sites_from_setting(self, project_setting): - if not project_setting.get("enabled"): - return {} - - initiated_handlers = {} - configured_sites = {} - all_sites = self._get_default_site_configs() - all_sites.update(project_setting.get("sites")) - for site_name, config in all_sites.items(): - handler = initiated_handlers. \ - get((config["provider"], site_name)) - if not handler: - handler = lib.factory.get_provider(config["provider"], - site_name, - presets=config) - initiated_handlers[(config["provider"], site_name)] = \ - handler - - if handler.is_active(): - configured_sites[site_name] = True - - return configured_sites - - def _get_default_site_configs(self): - """ - Returns skeleton settings for 'studio' and user's local site - """ - default_config = {'provider': 'local_drive'} - all_sites = {self.DEFAULT_SITE: default_config, - get_local_site_id(): default_config} - return all_sites - - def get_provider_for_site(self, project_name, site): - """ - Return provider name for site. - """ - site_preset = self.get_sync_project_setting(project_name)["sites"].\ - get(site) - if site_preset: - return site_preset["provider"] - - return "NA" - - @time_function - def get_sync_representations(self, collection, active_site, remote_site): - """ - Get representations that should be synced, these could be - recognised by presence of document in 'files.sites', where key is - a provider (GDrive, S3) and value is empty document or document - without 'created_dt' field. (Don't put null to 'created_dt'!). - - Querying of 'to-be-synched' files is offloaded to Mongod for - better performance. Goal is to get as few representations as - possible. 
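The heart of that query pairs one site that has 'created_dt' with one that does not and still has retries left; condensed from the body below:

    {"files.sites": {"$elemMatch": {
        "name": {"$in": [remote_site]},
        "created_dt": {"$exists": False},
        "tries": {"$in": retries_arr}
    }}}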
- Args: - collection (string): name of collection (in most cases matches - project name - active_site (string): identifier of current active site (could be - 'local_0' when working from home, 'studio' when working in the - studio (default) - remote_site (string): identifier of remote site I want to sync to - - Returns: - (list) of dictionaries - """ - log.debug("Check representations for : {}".format(collection)) - self.connection.Session["AVALON_PROJECT"] = collection - # retry_cnt - number of attempts to sync specific file before giving up - retries_arr = self._get_retries_arr(collection) - query = { - "type": "representation", - "$or": [ - {"$and": [ - { - "files.sites": { - "$elemMatch": { - "name": active_site, - "created_dt": {"$exists": True} - } - }}, { - "files.sites": { - "$elemMatch": { - "name": {"$in": [remote_site]}, - "created_dt": {"$exists": False}, - "tries": {"$in": retries_arr} - } - } - }]}, - {"$and": [ - { - "files.sites": { - "$elemMatch": { - "name": active_site, - "created_dt": {"$exists": False}, - "tries": {"$in": retries_arr} - } - }}, { - "files.sites": { - "$elemMatch": { - "name": {"$in": [remote_site]}, - "created_dt": {"$exists": True} - } - } - } - ]} - ] - } - log.debug("active_site:{} - remote_site:{}".format(active_site, - remote_site)) - log.debug("query: {}".format(query)) - representations = self.connection.find(query) - - return representations - - def check_status(self, file, local_site, remote_site, config_preset): - """ - Check synchronization status for single 'file' of single - 'representation' by single 'provider'. - (Eg. check if 'scene.ma' of lookdev.v10 should be synced to GDrive - - Always is comparing local record, eg. site with - 'name' == self.presets[PROJECT_NAME]['config']["active_site"] - - Args: - file (dictionary): of file from representation in Mongo - local_site (string): - local side of compare (usually 'studio') - remote_site (string): - gdrive etc. - config_preset (dict): config about active site, retries - Returns: - (string) - one of SyncStatus - """ - sites = file.get("sites") or [] - # if isinstance(sites, list): # temporary, old format of 'sites' - # return SyncStatus.DO_NOTHING - _, remote_rec = self._get_site_rec(sites, remote_site) or {} - if remote_rec: # sync remote target - created_dt = remote_rec.get("created_dt") - if not created_dt: - tries = self._get_tries_count_from_rec(remote_rec) - # file will be skipped if unsuccessfully tried over threshold - # error metadata needs to be purged manually in DB to reset - if tries < int(config_preset["retry_cnt"]): - return SyncStatus.DO_UPLOAD - else: - _, local_rec = self._get_site_rec(sites, local_site) or {} - if not local_rec or not local_rec.get("created_dt"): - tries = self._get_tries_count_from_rec(local_rec) - # file will be skipped if unsuccessfully tried over - # threshold times, error metadata needs to be purged - # manually in DB to reset - if tries < int(config_preset["retry_cnt"]): - return SyncStatus.DO_DOWNLOAD - - return SyncStatus.DO_NOTHING - - async def upload(self, collection, file, representation, provider_name, - remote_site_name, tree=None, preset=None): - """ - Upload single 'file' of a 'representation' to 'provider'. - Source url is taken from 'file' portion, where {root} placeholder - is replaced by 'representation.Context.root' - Provider could be one of implemented in provider.py. - - Updates MongoDB, fills in id of file from provider (ie. 
file_id - from GDrive), 'created_dt' - time of upload - - 'provider_name' doesn't have to match to 'site_name', single - provider (GDrive) might have multiple sites ('projectA', - 'projectB') - - Args: - collection (str): source collection - file (dictionary): of file from representation in Mongo - representation (dictionary): of representation - provider_name (string): gdrive, gdc etc. - site_name (string): site on provider, single provider(gdrive) could - have multiple sites (different accounts, credentials) - tree (dictionary): injected memory structure for performance - preset (dictionary): site config ('credentials_url', 'root'...) - - """ - # create ids sequentially, upload file in parallel later - with self.lock: - # this part modifies structure on 'remote_site', only single - # thread can do that at a time, upload/download to prepared - # structure should be run in parallel - remote_handler = lib.factory.get_provider(provider_name, - remote_site_name, - tree=tree, - presets=preset) - - file_path = file.get("path", "") - local_file_path, remote_file_path = self._resolve_paths( - file_path, collection, remote_site_name, remote_handler - ) - - target_folder = os.path.dirname(remote_file_path) - folder_id = remote_handler.create_folder(target_folder) - - if not folder_id: - err = "Folder {} wasn't created. Check permissions.".\ - format(target_folder) - raise NotADirectoryError(err) - - loop = asyncio.get_running_loop() - file_id = await loop.run_in_executor(None, - remote_handler.upload_file, - local_file_path, - remote_file_path, - self, - collection, - file, - representation, - remote_site_name, - True - ) - return file_id - - async def download(self, collection, file, representation, provider_name, - remote_site_name, tree=None, preset=None): - """ - Downloads file to local folder denoted in representation.Context. - - Args: - collection (str): source collection - file (dictionary) : info about processed file - representation (dictionary): repr that 'file' belongs to - provider_name (string): 'gdrive' etc - site_name (string): site on provider, single provider(gdrive) could - have multiple sites (different accounts, credentials) - tree (dictionary): injected memory structure for performance - preset (dictionary): site config ('credentials_url', 'root'...) 
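Both transfer methods hand the blocking provider call to a worker thread so the event loop stays responsive; the shared pattern, sketched (the real calls pass several extra bookkeeping arguments):

    loop = asyncio.get_running_loop()
    file_id = await loop.run_in_executor(None,              # default executor
                                         handler.download_file,
                                         remote_file_path,
                                         local_file_path)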
- - Returns: - (string) - 'name' of local file - """ - with self.lock: - remote_handler = lib.factory.get_provider(provider_name, - remote_site_name, - tree=tree, - presets=preset) - - file_path = file.get("path", "") - local_file_path, remote_file_path = self._resolve_paths( - file_path, collection, remote_site_name, remote_handler - ) - - local_folder = os.path.dirname(local_file_path) - os.makedirs(local_folder, exist_ok=True) - - local_site = self.get_active_site(collection) - - loop = asyncio.get_running_loop() - file_id = await loop.run_in_executor(None, - remote_handler.download_file, - remote_file_path, - local_file_path, - self, - collection, - file, - representation, - local_site, - True - ) - return file_id - - def update_db(self, collection, new_file_id, file, representation, - site, error=None, progress=None): - """ - Update 'provider' portion of records in DB with success (file_id) - or error (exception) - - Args: - collection (string): name of project - force to db connection as - each file might come from different collection - new_file_id (string): - file (dictionary): info about processed file (pulled from DB) - representation (dictionary): parent repr of file (from DB) - site (string): label ('gdrive', 'S3') - error (string): exception message - progress (float): 0-1 of progress of upload/download - - Returns: - None - """ - representation_id = representation.get("_id") - file_id = file.get("_id") - query = { - "_id": representation_id - } - - update = {} - if new_file_id: - update["$set"] = self._get_success_dict(new_file_id) - # reset previous errors if any - update["$unset"] = self._get_error_dict("", "", "") - elif progress is not None: - update["$set"] = self._get_progress_dict(progress) - else: - tries = self._get_tries_count(file, site) - tries += 1 - - update["$set"] = self._get_error_dict(error, tries) - - arr_filter = [ - {'s.name': site}, - {'f._id': ObjectId(file_id)} - ] - - self.connection.database[collection].update_one( - query, - update, - upsert=True, - array_filters=arr_filter - ) - - if progress is not None: - return - - status = 'failed' - error_str = 'with error {}'.format(error) - if new_file_id: - status = 'succeeded with id {}'.format(new_file_id) - error_str = '' - - source_file = file.get("path", "") - log.debug("File for {} - {source_file} process {status} {error_str}". - format(representation_id, - status=status, - source_file=source_file, - error_str=error_str)) - - def _get_file_info(self, files, _id): - """ - Return record from list of records which name matches to 'provider' - Could be possibly refactored with '_get_provider_rec' together. - - Args: - files (list): of dictionaries with info about published files - _id (string): _id of specific file - - Returns: - (int, dictionary): index from list and record with metadata - about site (if/when created, errors..) - OR (-1, None) if not present - """ - for index, rec in enumerate(files): - if rec.get("_id") == _id: - return index, rec - - return -1, None - - def _get_site_rec(self, sites, site_name): - """ - Return record from list of records which name matches to - 'remote_site_name' - - Args: - sites (list): of dictionaries - site_name (string): 'local_XXX', 'gdrive' - - Returns: - (int, dictionary): index from list and record with metadata - about site (if/when created, errors..) 
- OR (-1, None) if not present - """ - for index, rec in enumerate(sites): - if rec.get("name") == site_name: - return index, rec - - return -1, None - - def reset_provider_for_file(self, collection, representation_id, - side=None, file_id=None, site_name=None, - remove=False, pause=None): - """ - Reset information about synchronization for particular 'file_id' - and provider. - Useful for testing or forcing file to be reuploaded. - - 'side' and 'site_name' are disjunctive. - - 'side' is used for resetting local or remote side for - current user for repre. - - 'site_name' is used to set synchronization for particular site. - Should be used when repre should be synced to new site. - - Args: - collection (string): name of project (eg. collection) in DB - representation_id(string): _id of representation - file_id (string): file _id in representation - side (string): local or remote side - site_name (string): for adding new site - remove (bool): if True remove site altogether - pause (bool or None): if True - pause, False - unpause - - Returns: - throws ValueError - """ - query = { - "_id": ObjectId(representation_id) - } - - representation = list(self.connection.database[collection].find(query)) - if not representation: - raise ValueError("Representation {} not found in {}". - format(representation_id, collection)) - if side and site_name: - raise ValueError("Misconfiguration, only one of side and " + - "site_name arguments should be passed.") - - local_site = self.get_active_site(collection) - remote_site = self.get_remote_site(collection) - - if side: - if side == 'local': - site_name = local_site - else: - site_name = remote_site - - elem = {"name": site_name} - - if file_id: # reset site for particular file - self._reset_site_for_file(collection, query, - elem, file_id, site_name) - elif side: # reset site for whole representation - self._reset_site(collection, query, elem, site_name) - elif remove: # remove site for whole representation - self._remove_site(collection, query, representation, site_name) - elif pause is not None: - self._pause_unpause_site(collection, query, - representation, site_name, pause) - else: # add new site to all files for representation - self._add_site(collection, query, representation, elem, site_name) - - def _update_site(self, collection, query, update, arr_filter): - """ - Auxiliary method to call update_one function on DB - - Used for refactoring ugly reset_provider_for_file - """ - self.connection.database[collection].update_one( - query, - update, - upsert=True, - array_filters=arr_filter - ) - - def _reset_site_for_file(self, collection, query, - elem, file_id, site_name): - """ - Resets 'site_name' for 'file_id' on representation in 'query' on - 'collection' - """ - update = { - "$set": {"files.$[f].sites.$[s]": elem} - } - arr_filter = [ - {'s.name': site_name}, - {'f._id': ObjectId(file_id)} - ] - - self._update_site(collection, query, update, arr_filter) - - def _reset_site(self, collection, query, elem, site_name): - """ - Resets 'site_name' for all files of representation in 'query' - """ - update = { - "$set": {"files.$[].sites.$[s]": elem} - } - - arr_filter = [ - {'s.name': site_name} - ] - - self._update_site(collection, query, update, arr_filter) - - def _remove_site(self, collection, query, representation, site_name): - """ - Removes 'site_name' for 'representation' in 'query' - - Throws ValueError if 'site_name' not found on 'representation' - """ - found = False - for file in representation.pop().get("files"): - for site in 
file.get("sites"): - if site["name"] == site_name: - found = True - break - if not found: - msg = "Site {} not found".format(site_name) - log.info(msg) - raise ValueError(msg) - - update = { - "$pull": {"files.$[].sites": {"name": site_name}} - } - arr_filter = [] - - self._update_site(collection, query, update, arr_filter) - - def _pause_unpause_site(self, collection, query, - representation, site_name, pause): - """ - Pauses/unpauses all files for 'representation' based on 'pause' - - Throws ValueError if 'site_name' not found on 'representation' - """ - found = False - site = None - for file in representation.pop().get("files"): - for site in file.get("sites"): - if site["name"] == site_name: - found = True - break - if not found: - msg = "Site {} not found".format(site_name) - log.info(msg) - raise ValueError(msg) - - if pause: - site['paused'] = pause - else: - if site.get('paused'): - site.pop('paused') - - update = { - "$set": {"files.$[].sites.$[s]": site} - } - - arr_filter = [ - {'s.name': site_name} - ] - - self._update_site(collection, query, update, arr_filter) - - def _add_site(self, collection, query, representation, elem, site_name): - """ - Adds 'site_name' to 'representation' on 'collection' - - Throws ValueError if already present - """ - for file in representation.pop().get("files"): - for site in file.get("sites"): - if site["name"] == site_name: - msg = "Site {} already present".format(site_name) - log.info(msg) - raise ValueError(msg) - - update = { - "$push": {"files.$[].sites": elem} - } - - arr_filter = [] - - self._update_site(collection, query, update, arr_filter) - - def _remove_local_file(self, collection, representation_id, site_name): - """ - Removes all local files for 'site_name' of 'representation_id' - - Args: - collection (string): project name (must match DB) - representation_id (string): MongoDB _id value - site_name (string): name of configured and active site - - Returns: - only logs, catches IndexError and OSError - """ - my_local_site = get_local_site_id() - if my_local_site != site_name: - self.log.warning("Cannot remove non local file for {}". - format(site_name)) - return - - provider_name = self.get_provider_for_site(collection, site_name) - handler = lib.factory.get_provider(provider_name, site_name) - - if handler and isinstance(handler, LocalDriveHandler): - query = { - "_id": ObjectId(representation_id) - } - - representation = list( - self.connection.database[collection].find(query)) - if not representation: - self.log.debug("No repre {} found".format( - representation_id)) - return - - representation = representation.pop() - local_file_path = '' - for file in representation.get("files"): - local_file_path, _ = self._resolve_paths(file.get("path", ""), - collection - ) - try: - self.log.debug("Removing {}".format(local_file_path)) - os.remove(local_file_path) - except IndexError: - msg = "No file set for {}".format(representation_id) - self.log.debug(msg) - raise ValueError(msg) - except OSError: - msg = "File {} cannot be removed".format(file["path"]) - self.log.warning(msg) - raise ValueError(msg) - - try: - folder = os.path.dirname(local_file_path) - os.rmdir(folder) - except OSError: - msg = "folder {} cannot be removed".format(folder) - self.log.warning(msg) - raise ValueError(msg) - - def get_loop_delay(self, project_name): - """ - Return count of seconds before next synchronization loop starts - after finish of previous loop. 
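All the site updates in this class funnel through update_one with array filters; a condensed sketch with an illustrative progress write:

    update = {"$set": {"files.$[f].sites.$[s].progress": 0.5}}
    arr_filter = [{"s.name": "gdrive"},
                  {"f._id": ObjectId("5eeb25e411e06a16209ab78f")}]
    connection.database[collection].update_one(query, update,
                                               upsert=True,
                                               array_filters=arr_filter)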
- Returns: - (int): in seconds - """ - ld = self.sync_project_settings[project_name]["config"]["loop_delay"] - return int(ld) - - def show_widget(self): - """Show dialog to enter credentials""" - self.widget.show() - - def _get_success_dict(self, new_file_id): - """ - Provide success metadata ("id", "created_dt") to be stored in Db. - Used in $set: "DICT" part of query. - Sites are array inside of array(file), so real indexes for both - file and site are needed for upgrade in DB. - Args: - new_file_id: id of created file - Returns: - (dictionary) - """ - val = {"files.$[f].sites.$[s].id": new_file_id, - "files.$[f].sites.$[s].created_dt": datetime.now()} - return val - - def _get_error_dict(self, error="", tries="", progress=""): - """ - Provide error metadata to be stored in Db. - Used for set (error and tries provided) or unset mode. - Args: - error: (string) - message - tries: how many times failed - Returns: - (dictionary) - """ - val = {"files.$[f].sites.$[s].last_failed_dt": datetime.now(), - "files.$[f].sites.$[s].error": error, - "files.$[f].sites.$[s].tries": tries, - "files.$[f].sites.$[s].progress": progress - } - return val - - def _get_tries_count_from_rec(self, rec): - """ - Get number of failed attempts to sync from site record - Args: - rec (dictionary): info about specific site record - Returns: - (int) - number of failed attempts - """ - if not rec: - return 0 - return rec.get("tries", 0) - - def _get_tries_count(self, file, provider): - """ - Get number of failed attempts to sync - Args: - file (dictionary): info about specific file - provider (string): name of site ('gdrive' or specific user site) - Returns: - (int) - number of failed attempts - """ - _, rec = self._get_site_rec(file.get("sites", []), provider) - return rec.get("tries", 0) - - def _get_progress_dict(self, progress): - """ - Provide progress metadata to be stored in Db. - Used during upload/download for GUI to show. - Args: - progress: (float) - 0-1 progress of upload/download - Returns: - (dictionary) - """ - val = {"files.$[f].sites.$[s].progress": progress} - return val - - def _resolve_paths(self, file_path, collection, - remote_site_name=None, remote_handler=None): - """ - Returns tuple of local and remote file paths with {root} - placeholders replaced with proper values from Settings or Anatomy - - Args: - file_path(string): path with {root} - collection(string): project name - remote_site_name(string): remote site - remote_handler(AbstractProvider): implementation - Returns: - (string, string) - proper absolute paths - """ - remote_file_path = '' - if remote_handler: - root_configs = self._get_roots_config(self.sync_project_settings, - collection, - remote_site_name) - - remote_file_path = remote_handler.resolve_path(file_path, - root_configs) - - local_handler = lib.factory.get_provider( - 'local_drive', self.get_active_site(collection)) - local_file_path = local_handler.resolve_path( - file_path, None, self.get_anatomy(collection)) - - return local_file_path, remote_file_path - - def _get_retries_arr(self, project_name): - """ - Returns array with allowed values in 'tries' field. If repre - contains these values, it means it was tried to be synchronized - but failed. We try up to 'self.presets["retry_cnt"]' times before - giving up and skipping representation. 
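For example, with retry_cnt == 3 the helper yields the 'tries' values the sync query still accepts; None also matches records that have no 'tries' field yet:

    retry_cnt = 3
    arr = [i for i in range(int(retry_cnt))]   # [0, 1, 2]
    arr.append(None)                           # [0, 1, 2, None]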
- Returns: - (list) - """ - retry_cnt = self.sync_project_settings[project_name].\ - get("config")["retry_cnt"] - arr = [i for i in range(int(retry_cnt))] - arr.append(None) - - return arr - - def _get_roots_config(self, presets, project_name, site_name): - """ - Returns configured root(s) for 'project_name' and 'site_name' from - settings ('presets') - """ - return presets[project_name]['sites'][site_name]['root'] - + initiated_handlers = {} + configured_sites = {} + all_sites = module._get_default_site_configs() + all_sites.update(project_setting.get("sites")) + for site_name, config in all_sites.items(): + handler = initiated_handlers. \ + get((config["provider"], site_name)) + if not handler: + handler = lib.factory.get_provider(config["provider"], + project_name, + site_name, + presets=config) + initiated_handlers[(config["provider"], site_name)] = \ + handler + + if handler.is_active(): + configured_sites[site_name] = True + + return configured_sites class SyncServerThread(threading.Thread): """ @@ -1398,6 +232,7 @@ class SyncServerThread(threading.Thread): self.loop = None self.is_running = False self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=3) + self.timer = None def run(self): self.is_running = True @@ -1432,12 +267,12 @@ class SyncServerThread(threading.Thread): Returns: """ - try: - while self.is_running and not self.module.is_paused(): + while self.is_running and not self.module.is_paused(): + try: import time start_time = None self.module.set_sync_project_settings() # clean cache - for collection, preset in self.module.get_sync_project_settings().\ + for collection, preset in self.module.sync_project_settings.\ items(): start_time = time.time() local_site, remote_site = self._working_sites(collection) @@ -1462,6 +297,7 @@ class SyncServerThread(threading.Thread): site_preset = preset.get('sites')[remote_site] remote_provider = site_preset['provider'] handler = lib.factory.get_provider(remote_provider, + collection, remote_site, presets=site_preset) limit = lib.factory.get_provider_batch_limit( @@ -1491,13 +327,14 @@ class SyncServerThread(threading.Thread): tree = handler.get_tree() limit -= 1 task = asyncio.create_task( - self.module.upload(collection, - file, - sync, - remote_provider, - remote_site, - tree, - site_preset)) + upload(self.module, + collection, + file, + sync, + remote_provider, + remote_site, + tree, + site_preset)) task_files_to_process.append(task) # store info for exception handlingy files_processed_info.append((file, @@ -1510,13 +347,14 @@ class SyncServerThread(threading.Thread): tree = handler.get_tree() limit -= 1 task = asyncio.create_task( - self.module.download(collection, - file, - sync, - remote_provider, - remote_site, - tree, - site_preset)) + download(self.module, + collection, + file, + sync, + remote_provider, + remote_site, + tree, + site_preset)) task_files_to_process.append(task) files_processed_info.append((file, @@ -1547,17 +385,27 @@ class SyncServerThread(threading.Thread): duration = time.time() - start_time log.debug("One loop took {:.2f}s".format(duration)) - await asyncio.sleep(self.module.get_loop_delay(collection)) - except ConnectionResetError: - log.warning("ConnectionResetError in sync loop, trying next loop", - exc_info=True) - except CancelledError: - # just stopping server - pass - except Exception: - self.stop() - log.warning("Unhandled exception in sync loop, stopping server", - exc_info=True) + + delay = self.module.get_loop_delay(collection) + log.debug("Waiting for {} seconds to new 
loop".format(delay)) + self.timer = asyncio.create_task(self.run_timer(delay)) + await asyncio.gather(self.timer) + + except ConnectionResetError: + log.warning("ConnectionResetError in sync loop, " + "trying next loop", + exc_info=True) + except CancelledError: + # just stopping server + pass + except ResumableError: + log.warning("ResumableError in sync loop, " + "trying next loop", + exc_info=True) + except Exception: + self.stop() + log.warning("Unhandled except. in sync loop, stopping server", + exc_info=True) def stop(self): """Sets is_running flag to false, 'check_shutdown' shuts server down""" @@ -1580,6 +428,17 @@ class SyncServerThread(threading.Thread): await asyncio.sleep(0.07) self.loop.stop() + async def run_timer(self, delay): + """Wait for 'delay' seconds to start next loop""" + await asyncio.sleep(delay) + + def reset_timer(self): + """Called when waiting for next loop should be skipped""" + log.debug("Resetting timer") + if self.timer: + self.timer.cancel() + self.timer = None + def _working_sites(self, collection): if self.module.is_project_paused(collection): log.debug("Both sites same, skipping") @@ -1592,8 +451,8 @@ class SyncServerThread(threading.Thread): remote_site)) return None, None - if not all([self.module.site_is_working(collection, local_site), - self.module.site_is_working(collection, remote_site)]): + if not all([site_is_working(self.module, collection, local_site), + site_is_working(self.module, collection, remote_site)]): log.debug("Some of the sites {} - {} is not ".format(local_site, remote_site) + "working properly") diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py new file mode 100644 index 0000000000..a434af9fea --- /dev/null +++ b/openpype/modules/sync_server/sync_server_module.py @@ -0,0 +1,1216 @@ +import os +from bson.objectid import ObjectId +from datetime import datetime +import threading + +from avalon.api import AvalonMongoDB + +from .. import PypeModule, ITrayModule +from openpype.api import ( + Anatomy, + get_project_settings, + get_local_site_id) +from openpype.lib import PypeLogger + +from .providers.local_drive import LocalDriveHandler + +from .utils import time_function, SyncStatus + + +log = PypeLogger().get_logger("SyncServer") + + +class SyncServerModule(PypeModule, ITrayModule): + """ + Synchronization server that is syncing published files from local to + any of implemented providers (like GDrive, S3 etc.) + Runs in the background and checks all representations, looks for files + that are marked to be in different location than 'studio' (temporary), + checks if 'created_dt' field is present denoting successful sync + with provider destination. + Sites structure is created during publish OR by calling 'add_site' + method. + + By default it will always contain 1 record with + "name" == self.presets["active_site"] and + filled "created_dt" AND 1 or multiple records for all defined + remote sites, where "created_dt" is not present. 
+    This highlights that the file should be uploaded to the
+    remote destination.
+
+    ''' - example of file test_Cylinder_lookMain_v010.ma synced to GDrive
+    "files" : [
+        {
+            "path" : "{root}/Test/Assets/Cylinder/publish/look/lookMain/v010/
+                     test_Cylinder_lookMain_v010.ma",
+            "_id" : ObjectId("5eeb25e411e06a16209ab78f"),
+            "hash" : "test_Cylinder_lookMain_v010,ma|1592468963,24|4822",
+            "size" : NumberLong(4822),
+            "sites" : [
+                {
+                    "name": "john_local_XD4345",
+                    "created_dt" : ISODate("2020-05-22T08:05:44.000Z")
+                },
+                {
+                    "id" : ObjectId("5eeb25e411e06a16209ab78f"),
+                    "name": "gdrive",
+                    "created_dt" : ISODate("2020-05-25T08:54:35.833Z")
+                }
+            ]
+        }
+    ]
+    '''
+    Each Tray app has its own self.presets["local_id"] assigned; it is
+    used in sites as a name.
+    Tray is searching only for records where name matches its
+    self.presets["active_site"] + self.presets["remote_site"].
+    "active_site" could be the studio storage ('studio') or a specific
+    "local_id" when the user is working from home, disconnected from
+    the studio.
+    If the local record has its "created_dt" filled, it is a source and
+    the process will try to upload the file to all defined remote sites.
+
+    For remote files, "id" is a real id that could be used in the
+    appropriate API. Local files have an "id" too for conformity; it
+    contains just the file name.
+    It is expected that multiple providers will be implemented in separate
+    classes and registered in 'providers.py'.
+
+    """
+    # limit querying DB to look for X number of representations that should
+    # be synced; we try to run more loops with fewer records
+    # actual number of files synced could be lower as providers can have
+    # different limits imposed by their APIs
+    # set 0 for no limit
+    REPRESENTATION_LIMIT = 100
+    DEFAULT_SITE = 'studio'
+    LOCAL_SITE = 'local'
+    LOG_PROGRESS_SEC = 5  # how often to log progress to DB
+
+    name = "sync_server"
+    label = "Sync Queue"
+
+    def initialize(self, module_settings):
+        """
+        Called during Module Manager creation.
+
+        Collects needed data, checks asyncio presence.
+        Sets 'enabled' according to global settings for the module.
+        Shouldn't be doing any initialization, that's a job for 'tray_init'.
+        """
+        self.enabled = module_settings[self.name]["enabled"]
+
+        # some parts of code need to run sequentially, not in async
+        self.lock = None
+        # settings for all enabled projects for sync
+        self._sync_project_settings = None
+        self.sync_server_thread = None  # asyncio requires new thread
+
+        self.action_show_widget = None
+        self._paused = False
+        self._paused_projects = set()
+        self._paused_representations = set()
+        self._anatomies = {}
+
+        self._connection = None
+
+    """ Start of Public API """
+    def add_site(self, collection, representation_id, site_name=None,
+                 force=False):
+        """
+        Adds new site to representation to be synced.
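A minimal usage sketch of this endpoint as it might be called from an outside tool; the project name, representation id and the 'sync_server' handle are placeholders, only get_local_site_id is the real helper imported above:

    from openpype.api import get_local_site_id

    def mark_for_local_sync(sync_server, project_name, repre_id):
        # 'sync_server' stands for an initialized SyncServerModule instance
        try:
            sync_server.add_site(project_name,        # must match DB
                                 repre_id,            # MongoDB _id value
                                 site_name=get_local_site_id(),
                                 force=True)          # reset site if it exists
        except ValueError as exc:
            print("Cannot add site: {}".format(exc))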
+
+        'collection' must have synchronization enabled (globally or
+        project only)
+
+        Used as an API endpoint from outside applications (Loader etc.)
+
+        Args:
+            collection (string): project name (must match DB)
+            representation_id (string): MongoDB _id value
+            site_name (string): name of configured and active site
+            force (bool): reset site if exists
+
+        Raises:
+            ValueError: if any issue
+        """
+        if not self.get_sync_project_setting(collection):
+            raise ValueError("Project not configured")
+
+        if not site_name:
+            site_name = self.DEFAULT_SITE
+
+        self.reset_provider_for_file(collection,
+                                     representation_id,
+                                     site_name=site_name, force=force)
+
+    # public facing API
+    def remove_site(self, collection, representation_id, site_name,
+                    remove_local_files=False):
+        """
+        Removes 'site_name' for particular 'representation_id' on
+        'collection'
+
+        Args:
+            collection (string): project name (must match DB)
+            representation_id (string): MongoDB _id value
+            site_name (string): name of configured and active site
+            remove_local_files (bool): remove only files for 'local_id'
+                site
+
+        Raises:
+            ValueError: if any issue
+        """
+        if not self.get_sync_project_setting(collection):
+            raise ValueError("Project not configured")
+
+        self.reset_provider_for_file(collection,
+                                     representation_id,
+                                     site_name=site_name,
+                                     remove=True)
+        if remove_local_files:
+            self._remove_local_file(collection, representation_id, site_name)
+
+    def clear_project(self, collection, site_name):
+        """
+        Clear 'collection' of 'site_name' and its local files
+
+        Works only on real local sites, not on 'studio'
+        """
+        query = {
+            "type": "representation",
+            "files.sites.name": site_name
+        }
+
+        representations = list(
+            self.connection.database[collection].find(query))
+        if not representations:
+            self.log.debug("No repre found")
+            return
+
+        for repre in representations:
+            self.remove_site(collection, repre.get("_id"), site_name, True)
+
+    def pause_representation(self, collection, representation_id, site_name):
+        """
+        Sets 'representation_id' as paused, e.g. no syncing should be
+        happening on it.
+
+        Args:
+            collection (string): project name
+            representation_id (string): MongoDB objectId value
+            site_name (string): 'gdrive', 'studio' etc.
+        """
+        log.info("Pausing SyncServer for {}".format(representation_id))
+        self._paused_representations.add(representation_id)
+        self.reset_provider_for_file(collection, representation_id,
+                                     site_name=site_name, pause=True)
+
+    def unpause_representation(self, collection, representation_id, site_name):
+        """
+        Sets 'representation_id' as unpaused.
+
+        Does not fail or warn if repre wasn't paused.
+
+        Args:
+            collection (string): project name
+            representation_id (string): MongoDB objectId value
+            site_name (string): 'gdrive', 'studio' etc.
+        """
+        log.info("Unpausing SyncServer for {}".format(representation_id))
+        try:
+            self._paused_representations.remove(representation_id)
+        except KeyError:
+            pass
+        # self.paused_representations is not persistent
+        self.reset_provider_for_file(collection, representation_id,
+                                     site_name=site_name, pause=False)
+
+    def is_representation_paused(self, representation_id,
+                                 check_parents=False, project_name=None):
+        """
+        Returns if 'representation_id' is paused or not.
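Pausing works on three levels (whole server, single project, single representation); the check_parents flag documented below collapses them into one effective state, roughly:

    def effectively_paused(module, project_name, representation_id):
        # mirrors is_representation_paused(..., check_parents=True,
        # project_name=...) as implemented below; 'module' is the
        # SyncServerModule instance
        return (module.is_paused()
                or module.is_project_paused(project_name)
                or module.is_representation_paused(representation_id))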
+ + Args: + representation_id (string): MongoDB objectId value + check_parents (bool): check if parent project or server itself + are not paused + project_name (string): project to check if paused + + if 'check_parents', 'project_name' should be set too + Returns: + (bool) + """ + condition = representation_id in self._paused_representations + if check_parents and project_name: + condition = condition or \ + self.is_project_paused(project_name) or \ + self.is_paused() + return condition + + def pause_project(self, project_name): + """ + Sets 'project_name' as paused, eg. no syncing should be + happening on all representation inside. + + Args: + project_name (string): collection name + """ + log.info("Pausing SyncServer for {}".format(project_name)) + self._paused_projects.add(project_name) + + def unpause_project(self, project_name): + """ + Sets 'project_name' as unpaused + + Does not fail or warn if project wasn't paused. + + Args: + project_name (string): collection name + """ + log.info("Unpausing SyncServer for {}".format(project_name)) + try: + self._paused_projects.remove(project_name) + except KeyError: + pass + + def is_project_paused(self, project_name, check_parents=False): + """ + Returns if 'project_name' is paused or not. + + Args: + project_name (string): collection name + check_parents (bool): check if server itself + is not paused + Returns: + (bool) + """ + condition = project_name in self._paused_projects + if check_parents: + condition = condition or self.is_paused() + return condition + + def pause_server(self): + """ + Pause sync server + + It won't check anything, not uploading/downloading... + """ + log.info("Pausing SyncServer") + self._paused = True + + def unpause_server(self): + """ + Unpause server + """ + log.info("Unpausing SyncServer") + self._paused = False + + def is_paused(self): + """ Is server paused """ + return self._paused + + def get_active_sites(self, project_name): + """ + Returns list of active sites for 'project_name'. + + By default it returns ['studio'], this site is default + and always present even if SyncServer is not enabled. (for publish) + + Used mainly for Local settings for user override. + + Args: + project_name (string): + + Returns: + (list) of strings + """ + return self.get_active_sites_from_settings( + get_project_settings(project_name)) + + def get_active_sites_from_settings(self, settings): + """ + List available active sites from incoming 'settings'. Used for + returning 'default' values for Local Settings + + Args: + settings (dict): full settings (global + project) + Returns: + (list) of strings + """ + sync_settings = self._parse_sync_settings_from_settings(settings) + + return self._get_enabled_sites_from_settings(sync_settings) + + def get_configurable_items_for_site(self, project_name, site_name): + """ + Returns list of items that should be configurable by User + + Returns: + (list of dict) + [{key:"root", label:"root", value:"valueFromSettings"}] + """ + # if project_name is None: ..for get_default_project_settings + # return handler.get_configurable_items() + pass + + def get_active_site(self, project_name): + """ + Returns active (mine) site for 'project_name' from settings + + Returns: + (string) + """ + active_site = self.get_sync_project_setting( + project_name)['config']['active_site'] + if active_site == self.LOCAL_SITE: + return get_local_site_id() + return active_site + + # remote sites + def get_remote_sites(self, project_name): + """ + Returns all remote sites configured on 'project_name'. 
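Both get_active_site above and get_remote_site below resolve the placeholder value 'local' (LOCAL_SITE) to the machine-specific id, so project settings can stay generic; a condensed restatement of that logic:

    from openpype.api import get_local_site_id

    def resolve_site(configured_name):
        # 'local' is SyncServerModule.LOCAL_SITE; anything else is used
        # as-is, e.g. 'studio' or 'gdrive'
        if configured_name == "local":
            return get_local_site_id()   # e.g. "john_local_XD4345"
        return configured_name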
+
+        If 'project_name' is not enabled for syncing, returns [].
+
+        Used by Local Settings to allow the user to choose a remote site.
+
+        Args:
+            project_name (string):
+
+        Returns:
+            (list) of strings
+        """
+        return self.get_remote_sites_from_settings(
+            get_project_settings(project_name))
+
+    def get_remote_sites_from_settings(self, settings):
+        """
+        Get remote sites for returning 'default' values for Local Settings
+        """
+        sync_settings = self._parse_sync_settings_from_settings(settings)
+
+        return self._get_remote_sites_from_settings(sync_settings)
+
+    def get_remote_site(self, project_name):
+        """
+        Returns remote (theirs) site for 'project_name' from settings
+        """
+        remote_site = self.get_sync_project_setting(
+            project_name)['config']['remote_site']
+        if remote_site == self.LOCAL_SITE:
+            return get_local_site_id()
+
+        return remote_site
+
+    def reset_timer(self):
+        """
+        Called when waiting for the next loop should be skipped.
+
+        In case of user involvement (e.g. a site reset), the next loop
+        starts right away.
+        """
+        self.sync_server_thread.reset_timer()
+
+    def get_enabled_projects(self):
+        """Returns list of projects which have SyncServer enabled."""
+        enabled_projects = []
+        for project in self.connection.projects():
+            project_name = project["name"]
+            project_settings = self.get_sync_project_setting(project_name)
+            if project_settings:
+                enabled_projects.append(project_name)
+
+        return enabled_projects
+    """ End of Public API """
+
+    def get_local_file_path(self, collection, site_name, file_path):
+        """
+        Externalized for the app
+        """
+        handler = LocalDriveHandler(collection, site_name)
+        local_file_path = handler.resolve_path(file_path)
+
+        return local_file_path
+
+    def _get_remote_sites_from_settings(self, sync_settings):
+        if not self.enabled or not sync_settings.get('enabled'):
+            return []
+
+        remote_sites = [self.DEFAULT_SITE, self.LOCAL_SITE]
+        if sync_settings:
+            remote_sites.extend(sync_settings.get("sites").keys())
+
+        return list(set(remote_sites))
+
+    def _get_enabled_sites_from_settings(self, sync_settings):
+        sites = [self.DEFAULT_SITE]
+        if self.enabled and sync_settings.get('enabled'):
+            sites.append(self.LOCAL_SITE)
+
+        return sites
+
+    def connect_with_modules(self, *_a, **kw):
+        return
+
+    def tray_init(self):
+        """
+        Actual initialization of Sync Server.
+
+        Called when tray is initialized; it checks if the module should be
+        enabled. If not, no initialization is necessary.
+        """
+        # import only in tray, because of Python2 hosts
+        from .sync_server import SyncServerThread
+
+        if not self.enabled:
+            return
+
+        enabled_projects = self.get_enabled_projects()
+        if not enabled_projects:
+            self.enabled = False
+            return
+
+        self.lock = threading.Lock()
+
+        try:
+            self.sync_server_thread = SyncServerThread(self)
+            from .tray.app import SyncServerWindow
+            self.widget = SyncServerWindow(self)
+        except ValueError:
+            log.info("No system setting for sync. Not syncing.", exc_info=True)
+            self.enabled = False
+        except KeyError:
+            log.info("There are no presets set for SyncServer OR "
+                     "provided credentials are invalid, "
+                     "no syncing possible", exc_info=True)
+            self.enabled = False
+
+    def tray_start(self):
+        """
+        Triggered when Tray is started.
+
+        Checks if configuration presets are available and if there is
+        any provider ('gdrive', 'S3') that is activated
+        (e.g. has valid credentials).
+
+        Returns:
+            None
+        """
+        if self.sync_project_settings and self.enabled:
+            self.sync_server_thread.start()
+        else:
+            log.info("No presets or active providers. " +
+                     "Synchronization not possible.")
+
+    def tray_exit(self):
+        """
+        Stops sync thread if running.
+
+        Called from Module Manager
+        """
+        if not self.sync_server_thread:
+            return
+
+        if not self.is_running:
+            return
+        try:
+            log.info("Stopping sync server")
+            self.sync_server_thread.is_running = False
+            self.sync_server_thread.stop()
+        except Exception:
+            log.warning(
+                "Error happened while stopping sync server",
+                exc_info=True
+            )
+
+    def tray_menu(self, parent_menu):
+        """Add menu or action to Tray (or parent's) menu"""
+        if not self.enabled:
+            return
+
+        from Qt import QtWidgets
+        action = QtWidgets.QAction(self.label, parent_menu)
+        action.triggered.connect(self.show_widget)
+        parent_menu.addAction(action)
+        parent_menu.addSeparator()
+
+        self.action_show_widget = action
+
+    @property
+    def is_running(self):
+        return self.sync_server_thread.is_running
+
+    def get_anatomy(self, project_name):
+        """
+        Get already created or newly created anatomy for project
+
+        Args:
+            project_name (string):
+
+        Return:
+            (Anatomy)
+        """
+        return self._anatomies.get(project_name) or Anatomy(project_name)
+
+    @property
+    def connection(self):
+        if self._connection is None:
+            self._connection = AvalonMongoDB()
+
+        return self._connection
+
+    @property
+    def sync_project_settings(self):
+        if self._sync_project_settings is None:
+            self.set_sync_project_settings()
+
+        return self._sync_project_settings
+
+    def set_sync_project_settings(self):
+        """
+        Set sync_project_settings for all projects (caching)
+
+        For performance
+        """
+        sync_project_settings = {}
+
+        for collection in self.connection.database.collection_names(False):
+            sync_settings = self._parse_sync_settings_from_settings(
+                get_project_settings(collection))
+            if sync_settings:
+                default_sites = self._get_default_site_configs()
+                sync_settings['sites'].update(default_sites)
+                sync_project_settings[collection] = sync_settings
+
+        if not sync_project_settings:
+            log.info("No enabled and configured projects for sync.")
+
+        self._sync_project_settings = sync_project_settings
+
+    def get_sync_project_setting(self, project_name):
+        """ Handles pulling sync_server's settings for enabled 'project_name'
+
+        Args:
+            project_name (str): used in project settings
+        Returns:
+            (dict): settings dictionary for the enabled project,
+                empty if no settings or sync is disabled
+        """
+        # presets set already, do not call again and again
+        # self.log.debug("project preset {}".format(self.presets))
+        if self.sync_project_settings and \
+                self.sync_project_settings.get(project_name):
+            return self.sync_project_settings.get(project_name)
+
+        settings = get_project_settings(project_name)
+        return self._parse_sync_settings_from_settings(settings)
+
+    def _parse_sync_settings_from_settings(self, settings):
+        """ settings from api.get_project_settings, TODO: rename """
+        sync_settings = settings.get("global").get("sync_server")
+        if not sync_settings:
+            log.info("No project setting, not syncing.")
+            return {}
+        if sync_settings.get("enabled"):
+            return sync_settings
+
+        return {}
+
+    def _get_default_site_configs(self):
+        """
+        Returns skeleton settings for 'studio' and user's local site
+        """
+        default_config = {'provider': 'local_drive'}
+        all_sites = {self.DEFAULT_SITE: default_config,
                     get_local_site_id(): default_config}
+        return all_sites
+
+    def get_provider_for_site(self, project_name, site):
+        """
+        Return provider name for site.
+ """ + site_preset = self.get_sync_project_setting(project_name)["sites"].\ + get(site) + if site_preset: + return site_preset["provider"] + + return "NA" + + @time_function + def get_sync_representations(self, collection, active_site, remote_site): + """ + Get representations that should be synced, these could be + recognised by presence of document in 'files.sites', where key is + a provider (GDrive, S3) and value is empty document or document + without 'created_dt' field. (Don't put null to 'created_dt'!). + + Querying of 'to-be-synched' files is offloaded to Mongod for + better performance. Goal is to get as few representations as + possible. + Args: + collection (string): name of collection (in most cases matches + project name + active_site (string): identifier of current active site (could be + 'local_0' when working from home, 'studio' when working in the + studio (default) + remote_site (string): identifier of remote site I want to sync to + + Returns: + (list) of dictionaries + """ + log.debug("Check representations for : {}".format(collection)) + self.connection.Session["AVALON_PROJECT"] = collection + # retry_cnt - number of attempts to sync specific file before giving up + retries_arr = self._get_retries_arr(collection) + query = { + "type": "representation", + "$or": [ + {"$and": [ + { + "files.sites": { + "$elemMatch": { + "name": active_site, + "created_dt": {"$exists": True} + } + }}, { + "files.sites": { + "$elemMatch": { + "name": {"$in": [remote_site]}, + "created_dt": {"$exists": False}, + "tries": {"$in": retries_arr} + } + } + }]}, + {"$and": [ + { + "files.sites": { + "$elemMatch": { + "name": active_site, + "created_dt": {"$exists": False}, + "tries": {"$in": retries_arr} + } + }}, { + "files.sites": { + "$elemMatch": { + "name": {"$in": [remote_site]}, + "created_dt": {"$exists": True} + } + } + } + ]} + ] + } + log.debug("active_site:{} - remote_site:{}".format(active_site, + remote_site)) + log.debug("query: {}".format(query)) + representations = self.connection.find(query) + + return representations + + def check_status(self, file, local_site, remote_site, config_preset): + """ + Check synchronization status for single 'file' of single + 'representation' by single 'provider'. + (Eg. check if 'scene.ma' of lookdev.v10 should be synced to GDrive + + Always is comparing local record, eg. site with + 'name' == self.presets[PROJECT_NAME]['config']["active_site"] + + Args: + file (dictionary): of file from representation in Mongo + local_site (string): - local side of compare (usually 'studio') + remote_site (string): - gdrive etc. 
+ config_preset (dict): config about active site, retries + Returns: + (string) - one of SyncStatus + """ + sites = file.get("sites") or [] + # if isinstance(sites, list): # temporary, old format of 'sites' + # return SyncStatus.DO_NOTHING + _, remote_rec = self._get_site_rec(sites, remote_site) or {} + if remote_rec: # sync remote target + created_dt = remote_rec.get("created_dt") + if not created_dt: + tries = self._get_tries_count_from_rec(remote_rec) + # file will be skipped if unsuccessfully tried over threshold + # error metadata needs to be purged manually in DB to reset + if tries < int(config_preset["retry_cnt"]): + return SyncStatus.DO_UPLOAD + else: + _, local_rec = self._get_site_rec(sites, local_site) or {} + if not local_rec or not local_rec.get("created_dt"): + tries = self._get_tries_count_from_rec(local_rec) + # file will be skipped if unsuccessfully tried over + # threshold times, error metadata needs to be purged + # manually in DB to reset + if tries < int(config_preset["retry_cnt"]): + return SyncStatus.DO_DOWNLOAD + + return SyncStatus.DO_NOTHING + + def update_db(self, collection, new_file_id, file, representation, + site, error=None, progress=None): + """ + Update 'provider' portion of records in DB with success (file_id) + or error (exception) + + Args: + collection (string): name of project - force to db connection as + each file might come from different collection + new_file_id (string): + file (dictionary): info about processed file (pulled from DB) + representation (dictionary): parent repr of file (from DB) + site (string): label ('gdrive', 'S3') + error (string): exception message + progress (float): 0-1 of progress of upload/download + + Returns: + None + """ + representation_id = representation.get("_id") + file_id = file.get("_id") + query = { + "_id": representation_id + } + + update = {} + if new_file_id: + update["$set"] = self._get_success_dict(new_file_id) + # reset previous errors if any + update["$unset"] = self._get_error_dict("", "", "") + elif progress is not None: + update["$set"] = self._get_progress_dict(progress) + else: + tries = self._get_tries_count(file, site) + tries += 1 + + update["$set"] = self._get_error_dict(error, tries) + + arr_filter = [ + {'s.name': site}, + {'f._id': ObjectId(file_id)} + ] + + self.connection.database[collection].update_one( + query, + update, + upsert=True, + array_filters=arr_filter + ) + + if progress is not None: + return + + status = 'failed' + error_str = 'with error {}'.format(error) + if new_file_id: + status = 'succeeded with id {}'.format(new_file_id) + error_str = '' + + source_file = file.get("path", "") + log.debug("File for {} - {source_file} process {status} {error_str}". + format(representation_id, + status=status, + source_file=source_file, + error_str=error_str)) + + def _get_file_info(self, files, _id): + """ + Return record from list of records which name matches to 'provider' + Could be possibly refactored with '_get_provider_rec' together. + + Args: + files (list): of dictionaries with info about published files + _id (string): _id of specific file + + Returns: + (int, dictionary): index from list and record with metadata + about site (if/when created, errors..) 
+ OR (-1, None) if not present + """ + for index, rec in enumerate(files): + if rec.get("_id") == _id: + return index, rec + + return -1, None + + def _get_site_rec(self, sites, site_name): + """ + Return record from list of records which name matches to + 'remote_site_name' + + Args: + sites (list): of dictionaries + site_name (string): 'local_XXX', 'gdrive' + + Returns: + (int, dictionary): index from list and record with metadata + about site (if/when created, errors..) + OR (-1, None) if not present + """ + for index, rec in enumerate(sites): + if rec.get("name") == site_name: + return index, rec + + return -1, None + + def reset_provider_for_file(self, collection, representation_id, + side=None, file_id=None, site_name=None, + remove=False, pause=None, force=False): + """ + Reset information about synchronization for particular 'file_id' + and provider. + Useful for testing or forcing file to be reuploaded. + + 'side' and 'site_name' are disjunctive. + + 'side' is used for resetting local or remote side for + current user for repre. + + 'site_name' is used to set synchronization for particular site. + Should be used when repre should be synced to new site. + + Args: + collection (string): name of project (eg. collection) in DB + representation_id(string): _id of representation + file_id (string): file _id in representation + side (string): local or remote side + site_name (string): for adding new site + remove (bool): if True remove site altogether + pause (bool or None): if True - pause, False - unpause + force (bool): hard reset - currently only for add_site + + Returns: + throws ValueError + """ + query = { + "_id": ObjectId(representation_id) + } + + representation = list(self.connection.database[collection].find(query)) + if not representation: + raise ValueError("Representation {} not found in {}". 
+ format(representation_id, collection)) + if side and site_name: + raise ValueError("Misconfiguration, only one of side and " + + "site_name arguments should be passed.") + + local_site = self.get_active_site(collection) + remote_site = self.get_remote_site(collection) + + if side: + if side == 'local': + site_name = local_site + else: + site_name = remote_site + + elem = {"name": site_name} + + if file_id: # reset site for particular file + self._reset_site_for_file(collection, query, + elem, file_id, site_name) + elif side: # reset site for whole representation + self._reset_site(collection, query, elem, site_name) + elif remove: # remove site for whole representation + self._remove_site(collection, query, representation, site_name) + elif pause is not None: + self._pause_unpause_site(collection, query, + representation, site_name, pause) + else: # add new site to all files for representation + self._add_site(collection, query, representation, elem, site_name, + force) + + def _update_site(self, collection, query, update, arr_filter): + """ + Auxiliary method to call update_one function on DB + + Used for refactoring ugly reset_provider_for_file + """ + self.connection.database[collection].update_one( + query, + update, + upsert=True, + array_filters=arr_filter + ) + + def _reset_site_for_file(self, collection, query, + elem, file_id, site_name): + """ + Resets 'site_name' for 'file_id' on representation in 'query' on + 'collection' + """ + update = { + "$set": {"files.$[f].sites.$[s]": elem} + } + arr_filter = [ + {'s.name': site_name}, + {'f._id': ObjectId(file_id)} + ] + + self._update_site(collection, query, update, arr_filter) + + def _reset_site(self, collection, query, elem, site_name): + """ + Resets 'site_name' for all files of representation in 'query' + """ + update = { + "$set": {"files.$[].sites.$[s]": elem} + } + + arr_filter = [ + {'s.name': site_name} + ] + + self._update_site(collection, query, update, arr_filter) + + def _remove_site(self, collection, query, representation, site_name): + """ + Removes 'site_name' for 'representation' in 'query' + + Throws ValueError if 'site_name' not found on 'representation' + """ + found = False + for repre_file in representation.pop().get("files"): + for site in repre_file.get("sites"): + if site["name"] == site_name: + found = True + break + if not found: + msg = "Site {} not found".format(site_name) + log.info(msg) + raise ValueError(msg) + + update = { + "$pull": {"files.$[].sites": {"name": site_name}} + } + arr_filter = [] + + self._update_site(collection, query, update, arr_filter) + + def _pause_unpause_site(self, collection, query, + representation, site_name, pause): + """ + Pauses/unpauses all files for 'representation' based on 'pause' + + Throws ValueError if 'site_name' not found on 'representation' + """ + found = False + site = None + for repre_file in representation.pop().get("files"): + for site in repre_file.get("sites"): + if site["name"] == site_name: + found = True + break + if not found: + msg = "Site {} not found".format(site_name) + log.info(msg) + raise ValueError(msg) + + if pause: + site['paused'] = pause + else: + if site.get('paused'): + site.pop('paused') + + update = { + "$set": {"files.$[].sites.$[s]": site} + } + + arr_filter = [ + {'s.name': site_name} + ] + + self._update_site(collection, query, update, arr_filter) + + def _add_site(self, collection, query, representation, elem, site_name, + force=False): + """ + Adds 'site_name' to 'representation' on 'collection' + + Use 'force' to remove 
existing or raises ValueError + """ + for repre_file in representation.pop().get("files"): + for site in repre_file.get("sites"): + if site["name"] == site_name: + if force: + self._reset_site_for_file(collection, query, + elem, repre_file["_id"], + site_name) + return + else: + msg = "Site {} already present".format(site_name) + log.info(msg) + raise ValueError(msg) + + update = { + "$push": {"files.$[].sites": elem} + } + + arr_filter = [] + + self._update_site(collection, query, update, arr_filter) + + def _remove_local_file(self, collection, representation_id, site_name): + """ + Removes all local files for 'site_name' of 'representation_id' + + Args: + collection (string): project name (must match DB) + representation_id (string): MongoDB _id value + site_name (string): name of configured and active site + + Returns: + only logs, catches IndexError and OSError + """ + my_local_site = get_local_site_id() + if my_local_site != site_name: + self.log.warning("Cannot remove non local file for {}". + format(site_name)) + return + + provider_name = self.get_provider_for_site(collection, site_name) + + if provider_name == 'local_drive': + query = { + "_id": ObjectId(representation_id) + } + + representation = list( + self.connection.database[collection].find(query)) + if not representation: + self.log.debug("No repre {} found".format( + representation_id)) + return + + representation = representation.pop() + local_file_path = '' + for file in representation.get("files"): + local_file_path = self.get_local_file_path(collection, + site_name, + file.get("path", "") + ) + try: + self.log.debug("Removing {}".format(local_file_path)) + os.remove(local_file_path) + except IndexError: + msg = "No file set for {}".format(representation_id) + self.log.debug(msg) + raise ValueError(msg) + except OSError: + msg = "File {} cannot be removed".format(file["path"]) + self.log.warning(msg) + raise ValueError(msg) + + folder = None + try: + folder = os.path.dirname(local_file_path) + os.rmdir(folder) + except OSError: + msg = "folder {} cannot be removed".format(folder) + self.log.warning(msg) + raise ValueError(msg) + + def get_loop_delay(self, project_name): + """ + Return count of seconds before next synchronization loop starts + after finish of previous loop. + Returns: + (int): in seconds + """ + ld = self.sync_project_settings[project_name]["config"]["loop_delay"] + return int(ld) + + def show_widget(self): + """Show dialog to enter credentials""" + self.widget.show() + + def _get_success_dict(self, new_file_id): + """ + Provide success metadata ("id", "created_dt") to be stored in Db. + Used in $set: "DICT" part of query. + Sites are array inside of array(file), so real indexes for both + file and site are needed for upgrade in DB. + Args: + new_file_id: id of created file + Returns: + (dictionary) + """ + val = {"files.$[f].sites.$[s].id": new_file_id, + "files.$[f].sites.$[s].created_dt": datetime.now()} + return val + + def _get_error_dict(self, error="", tries="", progress=""): + """ + Provide error metadata to be stored in Db. + Used for set (error and tries provided) or unset mode. 
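The "files.$[f].sites.$[s].*" keys produced by these helpers are MongoDB filtered positional operators and only take effect together with matching array_filters, the way _update_site and update_db pass them. A minimal pymongo sketch with a hypothetical 'db' handle, collection name and ids:

    from datetime import datetime
    from bson.objectid import ObjectId

    db["MyProject"].update_one(
        {"_id": ObjectId("5eeb25e411e06a16209ab78f")},       # representation
        {"$set": {
            "files.$[f].sites.$[s].id": "provider_file_id",
            "files.$[f].sites.$[s].created_dt": datetime.now(),
        }},
        array_filters=[
            {"s.name": "gdrive"},                            # which site record
            {"f._id": ObjectId("5eeb25e411e06a16209ab78f")}  # which file record
        ],
    )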
+ Args: + error: (string) - message + tries: how many times failed + Returns: + (dictionary) + """ + val = {"files.$[f].sites.$[s].last_failed_dt": datetime.now(), + "files.$[f].sites.$[s].error": error, + "files.$[f].sites.$[s].tries": tries, + "files.$[f].sites.$[s].progress": progress + } + return val + + def _get_tries_count_from_rec(self, rec): + """ + Get number of failed attempts to sync from site record + Args: + rec (dictionary): info about specific site record + Returns: + (int) - number of failed attempts + """ + if not rec: + return 0 + return rec.get("tries", 0) + + def _get_tries_count(self, file, provider): + """ + Get number of failed attempts to sync + Args: + file (dictionary): info about specific file + provider (string): name of site ('gdrive' or specific user site) + Returns: + (int) - number of failed attempts + """ + _, rec = self._get_site_rec(file.get("sites", []), provider) + return rec.get("tries", 0) + + def _get_progress_dict(self, progress): + """ + Provide progress metadata to be stored in Db. + Used during upload/download for GUI to show. + Args: + progress: (float) - 0-1 progress of upload/download + Returns: + (dictionary) + """ + val = {"files.$[f].sites.$[s].progress": progress} + return val + + def _get_retries_arr(self, project_name): + """ + Returns array with allowed values in 'tries' field. If repre + contains these values, it means it was tried to be synchronized + but failed. We try up to 'self.presets["retry_cnt"]' times before + giving up and skipping representation. + Returns: + (list) + """ + retry_cnt = self.sync_project_settings[project_name].\ + get("config")["retry_cnt"] + arr = [i for i in range(int(retry_cnt))] + arr.append(None) + + return arr + + def _get_roots_config(self, presets, project_name, site_name): + """ + Returns configured root(s) for 'project_name' and 'site_name' from + settings ('presets') + """ + return presets[project_name]['sites'][site_name]['root'] diff --git a/openpype/modules/sync_server/tray/app.py b/openpype/modules/sync_server/tray/app.py index 476e9d16e8..2538675c51 100644 --- a/openpype/modules/sync_server/tray/app.py +++ b/openpype/modules/sync_server/tray/app.py @@ -1,35 +1,17 @@ from Qt import QtWidgets, QtCore, QtGui -from Qt.QtCore import Qt -import attr -import os -import sys -import subprocess - -from openpype.tools.settings import ( - ProjectListWidget, - style -) - -from avalon.tools.delegates import PrettyTimeDelegate, pretty_timestamp -from bson.objectid import ObjectId +from openpype.tools.settings import style from openpype.lib import PypeLogger -from openpype.api import get_local_site_id +from openpype import resources + +from openpype.modules.sync_server.tray.widgets import ( + SyncProjectListWidget, + SyncRepresentationSummaryWidget +) log = PypeLogger().get_logger("SyncServer") -STATUS = { - 0: 'In Progress', - 1: 'Failed', - 2: 'Queued', - 3: 'Paused', - 4: 'Synced OK', - -1: 'Not available' -} - -DUMMY_PROJECT = "No project configured" - class SyncServerWindow(QtWidgets.QDialog): """ @@ -44,8 +26,8 @@ class SyncServerWindow(QtWidgets.QDialog): self.setFocusPolicy(QtCore.Qt.StrongFocus) self.setStyleSheet(style.load_stylesheet()) - self.setWindowIcon(QtGui.QIcon(style.app_icon_path())) - self.resize(1400, 800) + self.setWindowIcon(QtGui.QIcon(resources.pype_icon_filepath())) + self.resize(1450, 700) self.timer = QtCore.QTimer() self.timer.timeout.connect(self._hide_message) @@ -65,7 +47,7 @@ class SyncServerWindow(QtWidgets.QDialog): left_column_layout.addWidget(self.pause_btn) 
left_column.setLayout(left_column_layout) - repres = SyncRepresentationWidget( + repres = SyncRepresentationSummaryWidget( sync_server, project=self.projects.current_project, parent=self) @@ -96,7 +78,7 @@ class SyncServerWindow(QtWidgets.QDialog): layout.addWidget(footer) self.setLayout(body_layout) - self.setWindowTitle("Sync Server") + self.setWindowTitle("Sync Queue") self.projects.project_changed.connect( lambda: repres.table_view.model().set_project( @@ -134,1912 +116,3 @@ class SyncServerWindow(QtWidgets.QDialog): """ self.message.setText("") self.message.hide() - - -class SyncProjectListWidget(ProjectListWidget): - """ - Lists all projects that are synchronized to choose from - """ - - def __init__(self, sync_server, parent): - super(SyncProjectListWidget, self).__init__(parent) - self.sync_server = sync_server - self.project_list.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) - self.project_list.customContextMenuRequested.connect( - self._on_context_menu) - self.project_name = None - self.local_site = None - self.icons = {} - - def validate_context_change(self): - return True - - def refresh(self): - model = self.project_list.model() - model.clear() - - project_name = None - for project_name in self.sync_server.get_sync_project_settings().\ - keys(): - if self.sync_server.is_paused() or \ - self.sync_server.is_project_paused(project_name): - icon = self._get_icon("paused") - else: - icon = self._get_icon("synced") - - model.appendRow(QtGui.QStandardItem(icon, project_name)) - - if len(self.sync_server.get_sync_project_settings().keys()) == 0: - model.appendRow(QtGui.QStandardItem(DUMMY_PROJECT)) - - self.current_project = self.project_list.currentIndex().data( - QtCore.Qt.DisplayRole - ) - if not self.current_project: - self.current_project = self.project_list.model().item(0). 
\ - data(QtCore.Qt.DisplayRole) - - if project_name: - self.local_site = self.sync_server.get_active_site(project_name) - - def _get_icon(self, status): - if not self.icons.get(status): - resource_path = os.path.dirname(__file__) - resource_path = os.path.join(resource_path, "..", - "resources") - pix_url = "{}/{}.png".format(resource_path, status) - icon = QtGui.QIcon(pix_url) - self.icons[status] = icon - else: - icon = self.icons[status] - return icon - - def _on_context_menu(self, point): - point_index = self.project_list.indexAt(point) - if not point_index.isValid(): - return - - self.project_name = point_index.data(QtCore.Qt.DisplayRole) - - menu = QtWidgets.QMenu() - actions_mapping = {} - - if self.sync_server.is_project_paused(self.project_name): - action = QtWidgets.QAction("Unpause") - actions_mapping[action] = self._unpause - else: - action = QtWidgets.QAction("Pause") - actions_mapping[action] = self._pause - menu.addAction(action) - - if self.local_site == get_local_site_id(): - action = QtWidgets.QAction("Clear local project") - actions_mapping[action] = self._clear_project - menu.addAction(action) - - result = menu.exec_(QtGui.QCursor.pos()) - if result: - to_run = actions_mapping[result] - if to_run: - to_run() - - def _pause(self): - if self.project_name: - self.sync_server.pause_project(self.project_name) - self.project_name = None - self.refresh() - - def _unpause(self): - if self.project_name: - self.sync_server.unpause_project(self.project_name) - self.project_name = None - self.refresh() - - def _clear_project(self): - if self.project_name: - self.sync_server.clear_project(self.project_name, self.local_site) - self.project_name = None - self.refresh() - - -class ProjectModel(QtCore.QAbstractListModel): - def __init__(self, *args, projects=None, **kwargs): - super(ProjectModel, self).__init__(*args, **kwargs) - self.projects = projects or [] - - def data(self, index, role): - if role == Qt.DisplayRole: - # See below for the data structure. - status, text = self.projects[index.row()] - # Return the todo text only. - return text - - def rowCount(self, index): - return len(self.todos) - - -class SyncRepresentationWidget(QtWidgets.QWidget): - """ - Summary dialog with list of representations that matches current - settings 'local_site' and 'remote_site'. 
- """ - active_changed = QtCore.Signal() # active index changed - message_generated = QtCore.Signal(str) - - default_widths = ( - ("asset", 210), - ("subset", 190), - ("version", 10), - ("representation", 90), - ("created_dt", 100), - ("sync_dt", 100), - ("local_site", 60), - ("remote_site", 70), - ("files_count", 70), - ("files_size", 70), - ("priority", 20), - ("state", 50) - ) - - def __init__(self, sync_server, project=None, parent=None): - super(SyncRepresentationWidget, self).__init__(parent) - - self.sync_server = sync_server - - self._selected_id = None # keep last selected _id - self.representation_id = None - self.site_name = None # to pause/unpause representation - - self.filter = QtWidgets.QLineEdit() - self.filter.setPlaceholderText("Filter representations..") - - top_bar_layout = QtWidgets.QHBoxLayout() - top_bar_layout.addWidget(self.filter) - - self.table_view = QtWidgets.QTableView() - headers = [item[0] for item in self.default_widths] - - model = SyncRepresentationModel(sync_server, headers, project) - self.table_view.setModel(model) - self.table_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) - self.table_view.setSelectionMode( - QtWidgets.QAbstractItemView.SingleSelection) - self.table_view.setSelectionBehavior( - QtWidgets.QAbstractItemView.SelectRows) - self.table_view.horizontalHeader().setSortIndicator( - -1, Qt.AscendingOrder) - self.table_view.setSortingEnabled(True) - self.table_view.setAlternatingRowColors(True) - self.table_view.verticalHeader().hide() - - time_delegate = PrettyTimeDelegate(self) - column = self.table_view.model().get_header_index("created_dt") - self.table_view.setItemDelegateForColumn(column, time_delegate) - column = self.table_view.model().get_header_index("sync_dt") - self.table_view.setItemDelegateForColumn(column, time_delegate) - - column = self.table_view.model().get_header_index("local_site") - delegate = ImageDelegate(self) - self.table_view.setItemDelegateForColumn(column, delegate) - - column = self.table_view.model().get_header_index("remote_site") - delegate = ImageDelegate(self) - self.table_view.setItemDelegateForColumn(column, delegate) - - column = self.table_view.model().get_header_index("files_size") - delegate = SizeDelegate(self) - self.table_view.setItemDelegateForColumn(column, delegate) - - for column_name, width in self.default_widths: - idx = model.get_header_index(column_name) - self.table_view.setColumnWidth(idx, width) - - layout = QtWidgets.QVBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.addLayout(top_bar_layout) - layout.addWidget(self.table_view) - - self.table_view.doubleClicked.connect(self._double_clicked) - self.filter.textChanged.connect(lambda: model.set_filter( - self.filter.text())) - self.table_view.customContextMenuRequested.connect( - self._on_context_menu) - - self.table_view.model().modelReset.connect(self._set_selection) - - self.selection_model = self.table_view.selectionModel() - self.selection_model.selectionChanged.connect(self._selection_changed) - - def _selection_changed(self, new_selection): - index = self.selection_model.currentIndex() - self._selected_id = \ - self.table_view.model().data(index, Qt.UserRole) - - def _set_selection(self): - """ - Sets selection to 'self._selected_id' if exists. - - Keep selection during model refresh. 
- """ - if self._selected_id: - index = self.table_view.model().get_index(self._selected_id) - if index and index.isValid(): - mode = QtCore.QItemSelectionModel.Select | \ - QtCore.QItemSelectionModel.Rows - self.selection_model.setCurrentIndex(index, mode) - else: - self._selected_id = None - - def _double_clicked(self, index): - """ - Opens representation dialog with all files after doubleclick - """ - _id = self.table_view.model().data(index, Qt.UserRole) - detail_window = SyncServerDetailWindow( - self.sync_server, _id, self.table_view.model()._project) - detail_window.exec() - - def _on_context_menu(self, point): - """ - Shows menu with loader actions on Right-click. - """ - point_index = self.table_view.indexAt(point) - if not point_index.isValid(): - return - - self.item = self.table_view.model()._data[point_index.row()] - self.representation_id = self.item._id - log.debug("menu representation _id:: {}". - format(self.representation_id)) - - menu = QtWidgets.QMenu() - actions_mapping = {} - - action = QtWidgets.QAction("Open in explorer") - actions_mapping[action] = self._open_in_explorer - menu.addAction(action) - - local_site, local_progress = self.item.local_site.split() - remote_site, remote_progress = self.item.remote_site.split() - local_progress = float(local_progress) - remote_progress = float(remote_progress) - - # progress smaller then 1.0 --> in progress or queued - if local_progress < 1.0: - self.site_name = local_site - else: - self.site_name = remote_site - - if self.item.state in [STATUS[0], STATUS[2]]: - action = QtWidgets.QAction("Pause") - actions_mapping[action] = self._pause - menu.addAction(action) - - if self.item.state == STATUS[3]: - action = QtWidgets.QAction("Unpause") - actions_mapping[action] = self._unpause - menu.addAction(action) - - # if self.item.state == STATUS[1]: - # action = QtWidgets.QAction("Open error detail") - # actions_mapping[action] = self._show_detail - # menu.addAction(action) - - if remote_progress == 1.0: - action = QtWidgets.QAction("Reset local site") - actions_mapping[action] = self._reset_local_site - menu.addAction(action) - - if local_progress == 1.0: - action = QtWidgets.QAction("Reset remote site") - actions_mapping[action] = self._reset_remote_site - menu.addAction(action) - - if local_site != self.sync_server.DEFAULT_SITE: - action = QtWidgets.QAction("Completely remove from local") - actions_mapping[action] = self._remove_site - menu.addAction(action) - else: - action = QtWidgets.QAction("Mark for sync to local") - actions_mapping[action] = self._add_site - menu.addAction(action) - - if not actions_mapping: - action = QtWidgets.QAction("< No action >") - actions_mapping[action] = None - menu.addAction(action) - - result = menu.exec_(QtGui.QCursor.pos()) - if result: - to_run = actions_mapping[result] - if to_run: - to_run() - - self.table_view.model().refresh() - - def _pause(self): - self.sync_server.pause_representation(self.table_view.model()._project, - self.representation_id, - self.site_name) - self.site_name = None - self.message_generated.emit("Paused {}".format(self.representation_id)) - - def _unpause(self): - self.sync_server.unpause_representation( - self.table_view.model()._project, - self.representation_id, - self.site_name) - self.site_name = None - self.message_generated.emit("Unpaused {}".format( - self.representation_id)) - - # temporary here for testing, will be removed TODO - def _add_site(self): - log.info(self.representation_id) - project_name = self.table_view.model()._project - local_site_name = 
self.sync_server.get_my_local_site() - try: - self.sync_server.add_site( - project_name, - self.representation_id, - local_site_name - ) - self.message_generated.emit( - "Site {} added for {}".format(local_site_name, - self.representation_id)) - except ValueError as exp: - self.message_generated.emit("Error {}".format(str(exp))) - - def _remove_site(self): - """ - Removes site record AND files. - - This is ONLY for representations stored on local site, which - cannot be same as SyncServer.DEFAULT_SITE. - - This could only happen when artist work on local machine, not - connected to studio mounted drives. - """ - log.info("Removing {}".format(self.representation_id)) - try: - local_site = get_local_site_id() - self.sync_server.remove_site( - self.table_view.model()._project, - self.representation_id, - local_site, - True - ) - self.message_generated.emit("Site {} removed".format(local_site)) - except ValueError as exp: - self.message_generated.emit("Error {}".format(str(exp))) - - def _reset_local_site(self): - """ - Removes errors or success metadata for particular file >> forces - redo of upload/download - """ - self.sync_server.reset_provider_for_file( - self.table_view.model()._project, - self.representation_id, - 'local' - ) - - def _reset_remote_site(self): - """ - Removes errors or success metadata for particular file >> forces - redo of upload/download - """ - self.sync_server.reset_provider_for_file( - self.table_view.model()._project, - self.representation_id, - 'remote' - ) - - def _open_in_explorer(self): - if not self.item: - return - - fpath = self.item.path - project = self.table_view.model()._project - fpath = self.sync_server.get_local_file_path(project, fpath) - - fpath = os.path.normpath(os.path.dirname(fpath)) - if os.path.isdir(fpath): - if 'win' in sys.platform: # windows - subprocess.Popen('explorer "%s"' % fpath) - elif sys.platform == 'darwin': # macOS - subprocess.Popen(['open', fpath]) - else: # linux - try: - subprocess.Popen(['xdg-open', fpath]) - except OSError: - raise OSError('unsupported xdg-open call??') - - -class SyncRepresentationModel(QtCore.QAbstractTableModel): - """ - Model for summary of representations. - - Groups files information per representation. Allows sorting and - full text filtering. - - Allows pagination, most of heavy lifting is being done on DB side. - Single model matches to single collection. When project is changed, - model is reset and refreshed. - - Args: - sync_server (SyncServer) - object to call server operations (update - db status, set site status...) - header (list) - names of visible columns - project (string) - collection name, all queries must be called on - a specific collection - - """ - PAGE_SIZE = 20 # default page size to query for - REFRESH_SEC = 5000 # in seconds, requery DB for new status - DEFAULT_SORT = { - "updated_dt_remote": -1, - "_id": 1 - } - SORT_BY_COLUMN = [ - "context.asset", # asset - "context.subset", # subset - "context.version", # version - "context.representation", # representation - "updated_dt_local", # local created_dt - "updated_dt_remote", # remote created_dt - "avg_progress_local", # local progress - "avg_progress_remote", # remote progress - "files_count", # count of files - "files_size", # file size of all files - "context.asset", # priority TODO - "status" # state - ] - - @attr.s - class SyncRepresentation: - """ - Auxiliary object for easier handling. - - Fields must contain all header values (+ any arbitrary values). 
- """ - _id = attr.ib() - asset = attr.ib() - subset = attr.ib() - version = attr.ib() - representation = attr.ib() - created_dt = attr.ib(default=None) - sync_dt = attr.ib(default=None) - local_site = attr.ib(default=None) - remote_site = attr.ib(default=None) - files_count = attr.ib(default=None) - files_size = attr.ib(default=None) - priority = attr.ib(default=None) - state = attr.ib(default=None) - path = attr.ib(default=None) - - def __init__(self, sync_server, header, project=None): - super(SyncRepresentationModel, self).__init__() - self._header = header - self._data = [] - self._project = project - self._rec_loaded = 0 - self._total_records = 0 # how many documents query actually found - self.filter = None - - self._initialized = False - if not self._project or self._project == DUMMY_PROJECT: - return - - self.sync_server = sync_server - # TODO think about admin mode - # this is for regular user, always only single local and single remote - self.local_site = self.sync_server.get_active_site(self._project) - self.remote_site = self.sync_server.get_remote_site(self._project) - - self.projection = self.get_default_projection() - - self.sort = self.DEFAULT_SORT - - self.query = self.get_default_query() - self.default_query = list(self.get_default_query()) - - representations = self.dbcon.aggregate(self.query) - self.refresh(representations) - - self.timer = QtCore.QTimer() - self.timer.timeout.connect(self.tick) - self.timer.start(self.REFRESH_SEC) - - @property - def dbcon(self): - """ - Database object with preselected project (collection) to run DB - operations (find, aggregate). - - All queries should go through this (because of collection). - """ - return self.sync_server.connection.database[self._project] - - def data(self, index, role): - item = self._data[index.row()] - - if role == Qt.DisplayRole: - return attr.asdict(item)[self._header[index.column()]] - if role == Qt.UserRole: - return item._id - - def rowCount(self, index): - return len(self._data) - - def columnCount(self, index): - return len(self._header) - - def headerData(self, section, orientation, role): - if role == Qt.DisplayRole: - if orientation == Qt.Horizontal: - return str(self._header[section]) - - def tick(self): - """ - Triggers refresh of model. - - Because of pagination, prepared (sorting, filtering) query needs - to be run on DB every X seconds. - """ - self.refresh(representations=None, load_records=self._rec_loaded) - self.timer.start(self.REFRESH_SEC) - - def get_header_index(self, value): - """ - Returns index of 'value' in headers - - Args: - value (str): header name value - Returns: - (int) - """ - return self._header.index(value) - - def refresh(self, representations=None, load_records=0): - """ - Reloads representations from DB if necessary, adds them to model. - - Runs periodically (every X seconds) or by demand (change of - sorting, filtering etc.) - - Emits 'modelReset' signal. 
- - Args: - representations (PaginationResult object): pass result of - aggregate query from outside - mostly for testing only - load_records (int) - enforces how many records should be - actually queried (scrolled a couple of times to list more - than single page of records) - """ - if self.sync_server.is_paused() or \ - self.sync_server.is_project_paused(self._project): - return - - self.beginResetModel() - self._data = [] - self._rec_loaded = 0 - - if not representations: - self.query = self.get_default_query(load_records) - representations = self.dbcon.aggregate(self.query) - - self._add_page_records(self.local_site, self.remote_site, - representations) - self.endResetModel() - - def _add_page_records(self, local_site, remote_site, representations): - """ - Process all records from 'representation' and add them to storage. - - Args: - local_site (str): name of local site (mine) - remote_site (str): name of cloud provider (theirs) - representations (Mongo Cursor) - mimics result set, 1 object - with paginatedResults array and totalCount array - """ - result = representations.next() - count = 0 - total_count = result.get("totalCount") - if total_count: - count = total_count.pop().get('count') - self._total_records = count - - local_provider = _translate_provider_for_icon(self.sync_server, - self._project, - local_site) - remote_provider = _translate_provider_for_icon(self.sync_server, - self._project, - remote_site) - - for repre in result.get("paginatedResults"): - context = repre.get("context").pop() - files = repre.get("files", []) - if isinstance(files, dict): # aggregate returns dictionary - files = [files] - - # representation without files doesnt concern us - if not files: - continue - - local_updated = remote_updated = None - if repre.get('updated_dt_local'): - local_updated = \ - repre.get('updated_dt_local').strftime("%Y%m%dT%H%M%SZ") - - if repre.get('updated_dt_remote'): - remote_updated = \ - repre.get('updated_dt_remote').strftime("%Y%m%dT%H%M%SZ") - - avg_progress_remote = _convert_progress( - repre.get('avg_progress_remote', '0')) - avg_progress_local = _convert_progress( - repre.get('avg_progress_local', '0')) - - if context.get("version"): - version = "v{:0>3d}".format(context.get("version")) - else: - version = "hero" - - item = self.SyncRepresentation( - repre.get("_id"), - context.get("asset"), - context.get("subset"), - version, - context.get("representation"), - local_updated, - remote_updated, - '{} {}'.format(local_provider, avg_progress_local), - '{} {}'.format(remote_provider, avg_progress_remote), - repre.get("files_count", 1), - repre.get("files_size", 0), - 1, - STATUS[repre.get("status", -1)], - files[0].get('path') - ) - - self._data.append(item) - self._rec_loaded += 1 - - def canFetchMore(self, index): - """ - Check if there are more records than currently loaded - """ - # 'skip' might be suboptimal when representation hits 500k+ - return self._total_records > self._rec_loaded - - def fetchMore(self, index): - """ - Add more record to model. - - Called when 'canFetchMore' returns true, which means there are - more records in DB than loaded. 
- """ - log.debug("fetchMore") - items_to_fetch = min(self._total_records - self._rec_loaded, - self.PAGE_SIZE) - self.query = self.get_default_query(self._rec_loaded) - representations = self.dbcon.aggregate(self.query) - self.beginInsertRows(index, - self._rec_loaded, - self._rec_loaded + items_to_fetch - 1) - - self._add_page_records(self.local_site, self.remote_site, - representations) - - self.endInsertRows() - - def sort(self, index, order): - """ - Summary sort per representation. - - Sort is happening on a DB side, model is reset, db queried - again. - - Args: - index (int): column index - order (int): 0| - """ - # limit unwanted first re-sorting by view - if index < 0: - return - - self._rec_loaded = 0 - if order == 0: - order = 1 - else: - order = -1 - - self.sort = {self.SORT_BY_COLUMN[index]: order, '_id': 1} - self.query = self.get_default_query() - # import json - # log.debug(json.dumps(self.query, indent=4).replace('False', 'false').\ - # replace('True', 'true').replace('None', 'null')) - - representations = self.dbcon.aggregate(self.query) - self.refresh(representations) - - def set_filter(self, filter): - """ - Adds text value filtering - - Args: - filter (str): string inputted by user - """ - self.filter = filter - self.refresh() - - def set_project(self, project): - """ - Changes project, called after project selection is changed - - Args: - project (str): name of project - """ - self._project = project - self.sync_server.set_sync_project_settings() - self.local_site = self.sync_server.get_active_site(self._project) - self.remote_site = self.sync_server.get_remote_site(self._project) - self.refresh() - - def get_index(self, id): - """ - Get index of 'id' value. - - Used for keeping selection after refresh. - - Args: - id (str): MongoDB _id - Returns: - (QModelIndex) - """ - for i in range(self.rowCount(None)): - index = self.index(i, 0) - value = self.data(index, Qt.UserRole) - if value == id: - return index - return None - - def get_default_query(self, limit=0): - """ - Returns basic aggregate query for main table. - - Main table provides summary information about representation, - which could have multiple files. Details are accessible after - double click on representation row. - Columns: - 'created_dt' - max of created or updated (when failed) per repr - 'sync_dt' - same for remote side - 'local_site' - progress of repr on local side, 1 = finished - 'remote_site' - progress on remote side, calculates from files - 'state' - - 0 - in progress - 1 - failed - 2 - queued - 3 - paused - 4 - finished on both sides - - are calculated and must be calculated in DB because of - pagination - - Args: - limit (int): how many records should be returned, by default - it 'PAGE_SIZE' for performance. 
- Should be overridden by value of loaded records for refresh - functionality (got more records by scrolling, refresh - shouldn't reset that) - """ - if limit == 0: - limit = SyncRepresentationModel.PAGE_SIZE - - return [ - {"$match": self._get_match_part()}, - {'$unwind': '$files'}, - # merge potentially unwinded records back to single per repre - {'$addFields': { - 'order_remote': { - '$filter': {'input': '$files.sites', 'as': 'p', - 'cond': {'$eq': ['$$p.name', self.remote_site]} - }}, - 'order_local': { - '$filter': {'input': '$files.sites', 'as': 'p', - 'cond': {'$eq': ['$$p.name', self.local_site]} - }} - }}, - {'$addFields': { - # prepare progress per file, presence of 'created_dt' denotes - # successfully finished load/download - 'progress_remote': {'$first': { - '$cond': [{'$size': "$order_remote.progress"}, - "$order_remote.progress", - {'$cond': [ - {'$size': "$order_remote.created_dt"}, - [1], - [0] - ]} - ]}}, - 'progress_local': {'$first': { - '$cond': [{'$size': "$order_local.progress"}, - "$order_local.progress", - {'$cond': [ - {'$size': "$order_local.created_dt"}, - [1], - [0] - ]} - ]}}, - # file might be successfully created or failed, not both - 'updated_dt_remote': {'$first': { - '$cond': [{'$size': "$order_remote.created_dt"}, - "$order_remote.created_dt", - {'$cond': [ - {'$size': "$order_remote.last_failed_dt"}, - "$order_remote.last_failed_dt", - [] - ]} - ]}}, - 'updated_dt_local': {'$first': { - '$cond': [{'$size': "$order_local.created_dt"}, - "$order_local.created_dt", - {'$cond': [ - {'$size': "$order_local.last_failed_dt"}, - "$order_local.last_failed_dt", - [] - ]} - ]}}, - 'files_size': {'$ifNull': ["$files.size", 0]}, - 'failed_remote': { - '$cond': [{'$size': "$order_remote.last_failed_dt"}, - 1, - 0]}, - 'failed_local': { - '$cond': [{'$size': "$order_local.last_failed_dt"}, - 1, - 0]}, - 'failed_local_tries': { - '$cond': [{'$size': '$order_local.tries'}, - {'$first': '$order_local.tries'}, - 0]}, - 'failed_remote_tries': { - '$cond': [{'$size': '$order_remote.tries'}, - {'$first': '$order_remote.tries'}, - 0]}, - 'paused_remote': { - '$cond': [{'$size': "$order_remote.paused"}, - 1, - 0]}, - 'paused_local': { - '$cond': [{'$size': "$order_local.paused"}, - 1, - 0]}, - }}, - {'$group': { - '_id': '$_id', - # pass through context - same for representation - 'context': {'$addToSet': '$context'}, - 'data': {'$addToSet': '$data'}, - # pass through files as a list - 'files': {'$addToSet': '$files'}, - # count how many files - 'files_count': {'$sum': 1}, - 'files_size': {'$sum': '$files_size'}, - # sum avg progress, finished = 1 - 'avg_progress_remote': {'$avg': "$progress_remote"}, - 'avg_progress_local': {'$avg': "$progress_local"}, - # select last touch of file - 'updated_dt_remote': {'$max': "$updated_dt_remote"}, - 'failed_remote': {'$sum': '$failed_remote'}, - 'failed_local': {'$sum': '$failed_local'}, - 'failed_remote_tries': {'$sum': '$failed_remote_tries'}, - 'failed_local_tries': {'$sum': '$failed_local_tries'}, - 'paused_remote': {'$sum': '$paused_remote'}, - 'paused_local': {'$sum': '$paused_local'}, - 'updated_dt_local': {'$max': "$updated_dt_local"} - }}, - {"$project": self.projection}, - {"$sort": self.sort}, - { - '$facet': { - 'paginatedResults': [{'$skip': self._rec_loaded}, - {'$limit': limit}], - 'totalCount': [{'$count': 'count'}] - } - } - ] - - def _get_match_part(self): - """ - Extend match part with filter if present. - - Filter is set by user input. Each model has different fields to be - checked. 
- If performance issues are found, '$text' and text indexes should - be investigated. - - Fulltext searches in: - context.subset - context.asset - context.representation names AND _id (ObjectId) - """ - base_match = { - "type": "representation", - 'files.sites.name': {'$all': [self.local_site, - self.remote_site]} - } - if not self.filter: - return base_match - else: - regex_str = '.*{}.*'.format(self.filter) - base_match['$or'] = [ - {'context.subset': {'$regex': regex_str, '$options': 'i'}}, - {'context.asset': {'$regex': regex_str, '$options': 'i'}}, - {'context.representation': {'$regex': regex_str, - '$options': 'i'}}] - - if ObjectId.is_valid(self.filter): - base_match['$or'] = [{'_id': ObjectId(self.filter)}] - - return base_match - - def get_default_projection(self): - """ - Projection part for aggregate query. - - All fields with '1' will be returned, no others. - - Returns: - (dict) - """ - return { - "context.subset": 1, - "context.asset": 1, - "context.version": 1, - "context.representation": 1, - "data.path": 1, - "files": 1, - 'files_count': 1, - "files_size": 1, - 'avg_progress_remote': 1, - 'avg_progress_local': 1, - 'updated_dt_remote': 1, - 'updated_dt_local': 1, - 'paused_remote': 1, - 'paused_local': 1, - 'status': { - '$switch': { - 'branches': [ - { - 'case': { - '$or': ['$paused_remote', '$paused_local']}, - 'then': 3 # Paused - }, - { - 'case': { - '$or': [ - {'$gte': ['$failed_local_tries', 3]}, - {'$gte': ['$failed_remote_tries', 3]} - ]}, - 'then': 1}, - { - 'case': { - '$or': [{'$eq': ['$avg_progress_remote', 0]}, - {'$eq': ['$avg_progress_local', 0]}]}, - 'then': 2 # Queued - }, - { - 'case': {'$or': [{'$and': [ - {'$gt': ['$avg_progress_remote', 0]}, - {'$lt': ['$avg_progress_remote', 1]} - ]}, - {'$and': [ - {'$gt': ['$avg_progress_local', 0]}, - {'$lt': ['$avg_progress_local', 1]} - ]} - ]}, - 'then': 0 # In progress - }, - { - 'case': {'$and': [ - {'$eq': ['$avg_progress_remote', 1]}, - {'$eq': ['$avg_progress_local', 1]} - ]}, - 'then': 4 # Synced OK - }, - ], - 'default': -1 - } - } - } - - -class SyncServerDetailWindow(QtWidgets.QDialog): - def __init__(self, sync_server, _id, project, parent=None): - log.debug( - "!!! SyncServerDetailWindow _id:: {}".format(_id)) - super(SyncServerDetailWindow, self).__init__(parent) - self.setWindowFlags(QtCore.Qt.Window) - self.setFocusPolicy(QtCore.Qt.StrongFocus) - - self.setStyleSheet(style.load_stylesheet()) - self.setWindowIcon(QtGui.QIcon(style.app_icon_path())) - self.resize(1000, 400) - - body = QtWidgets.QWidget() - footer = QtWidgets.QWidget() - footer.setFixedHeight(20) - - container = SyncRepresentationDetailWidget(sync_server, _id, project, - parent=self) - body_layout = QtWidgets.QHBoxLayout(body) - body_layout.addWidget(container) - body_layout.setContentsMargins(0, 0, 0, 0) - - self.message = QtWidgets.QLabel() - self.message.hide() - - footer_layout = QtWidgets.QVBoxLayout(footer) - footer_layout.addWidget(self.message) - footer_layout.setContentsMargins(0, 0, 0, 0) - - layout = QtWidgets.QVBoxLayout(self) - layout.addWidget(body) - layout.addWidget(footer) - - self.setLayout(body_layout) - self.setWindowTitle("Sync Representation Detail") - - -class SyncRepresentationDetailWidget(QtWidgets.QWidget): - """ - Widget to display list of synchronizable files for single repre. 
- - Args: - _id (str): representation _id - project (str): name of project with repre - parent (QDialog): SyncServerDetailWindow - """ - active_changed = QtCore.Signal() # active index changed - - default_widths = ( - ("file", 290), - ("created_dt", 120), - ("sync_dt", 120), - ("local_site", 60), - ("remote_site", 60), - ("size", 60), - ("priority", 20), - ("state", 90) - ) - - def __init__(self, sync_server, _id=None, project=None, parent=None): - super(SyncRepresentationDetailWidget, self).__init__(parent) - - log.debug("Representation_id:{}".format(_id)) - self.representation_id = _id - self.item = None # set to item that mouse was clicked over - self.project = project - - self.sync_server = sync_server - - self._selected_id = None - - self.filter = QtWidgets.QLineEdit() - self.filter.setPlaceholderText("Filter representation..") - - top_bar_layout = QtWidgets.QHBoxLayout() - top_bar_layout.addWidget(self.filter) - - self.table_view = QtWidgets.QTableView() - headers = [item[0] for item in self.default_widths] - - model = SyncRepresentationDetailModel(sync_server, headers, _id, - project) - self.table_view.setModel(model) - self.table_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) - self.table_view.setSelectionMode( - QtWidgets.QAbstractItemView.SingleSelection) - self.table_view.setSelectionBehavior( - QtWidgets.QTableView.SelectRows) - self.table_view.horizontalHeader().setSortIndicator(-1, - Qt.AscendingOrder) - self.table_view.setSortingEnabled(True) - self.table_view.setAlternatingRowColors(True) - self.table_view.verticalHeader().hide() - - time_delegate = PrettyTimeDelegate(self) - column = self.table_view.model().get_header_index("created_dt") - self.table_view.setItemDelegateForColumn(column, time_delegate) - column = self.table_view.model().get_header_index("sync_dt") - self.table_view.setItemDelegateForColumn(column, time_delegate) - - column = self.table_view.model().get_header_index("local_site") - delegate = ImageDelegate(self) - self.table_view.setItemDelegateForColumn(column, delegate) - - column = self.table_view.model().get_header_index("remote_site") - delegate = ImageDelegate(self) - self.table_view.setItemDelegateForColumn(column, delegate) - - column = self.table_view.model().get_header_index("size") - delegate = SizeDelegate(self) - self.table_view.setItemDelegateForColumn(column, delegate) - - for column_name, width in self.default_widths: - idx = model.get_header_index(column_name) - self.table_view.setColumnWidth(idx, width) - - layout = QtWidgets.QVBoxLayout(self) - layout.setContentsMargins(0, 0, 0, 0) - layout.addLayout(top_bar_layout) - layout.addWidget(self.table_view) - - self.filter.textChanged.connect(lambda: model.set_filter( - self.filter.text())) - self.table_view.customContextMenuRequested.connect( - self._on_context_menu) - - self.table_view.model().modelReset.connect(self._set_selection) - - self.selection_model = self.table_view.selectionModel() - self.selection_model.selectionChanged.connect(self._selection_changed) - - def _selection_changed(self): - index = self.selection_model.currentIndex() - self._selected_id = self.table_view.model().data(index, Qt.UserRole) - - def _set_selection(self): - """ - Sets selection to 'self._selected_id' if exists. - - Keep selection during model refresh. 
- """ - if self._selected_id: - index = self.table_view.model().get_index(self._selected_id) - if index.isValid(): - mode = QtCore.QItemSelectionModel.Select | \ - QtCore.QItemSelectionModel.Rows - self.selection_model.setCurrentIndex(index, mode) - else: - self._selected_id = None - - def _show_detail(self): - """ - Shows windows with error message for failed sync of a file. - """ - dt = max(self.item.created_dt, self.item.sync_dt) - detail_window = SyncRepresentationErrorWindow(self.item._id, - self.project, - dt, - self.item.tries, - self.item.error) - detail_window.exec() - - def _on_context_menu(self, point): - """ - Shows menu with loader actions on Right-click. - """ - point_index = self.table_view.indexAt(point) - if not point_index.isValid(): - return - - self.item = self.table_view.model()._data[point_index.row()] - - menu = QtWidgets.QMenu() - actions_mapping = {} - - action = QtWidgets.QAction("Open in explorer") - actions_mapping[action] = self._open_in_explorer - menu.addAction(action) - - if self.item.state == STATUS[1]: - action = QtWidgets.QAction("Open error detail") - actions_mapping[action] = self._show_detail - menu.addAction(action) - - remote_site, remote_progress = self.item.remote_site.split() - if float(remote_progress) == 1.0: - action = QtWidgets.QAction("Reset local site") - actions_mapping[action] = self._reset_local_site - menu.addAction(action) - - local_site, local_progress = self.item.local_site.split() - if float(local_progress) == 1.0: - action = QtWidgets.QAction("Reset remote site") - actions_mapping[action] = self._reset_remote_site - menu.addAction(action) - - if not actions_mapping: - action = QtWidgets.QAction("< No action >") - actions_mapping[action] = None - menu.addAction(action) - - result = menu.exec_(QtGui.QCursor.pos()) - if result: - to_run = actions_mapping[result] - if to_run: - to_run() - - def _reset_local_site(self): - """ - Removes errors or success metadata for particular file >> forces - redo of upload/download - """ - self.sync_server.reset_provider_for_file( - self.table_view.model()._project, - self.representation_id, - 'local', - self.item._id) - self.table_view.model().refresh() - - def _reset_remote_site(self): - """ - Removes errors or success metadata for particular file >> forces - redo of upload/download - """ - self.sync_server.reset_provider_for_file( - self.table_view.model()._project, - self.representation_id, - 'remote', - self.item._id) - self.table_view.model().refresh() - - def _open_in_explorer(self): - if not self.item: - return - - fpath = self.item.path - project = self.table_view.model()._project - fpath = self.sync_server.get_local_file_path(project, fpath) - - fpath = os.path.normpath(os.path.dirname(fpath)) - if os.path.isdir(fpath): - if 'win' in sys.platform: # windows - subprocess.Popen('explorer "%s"' % fpath) - elif sys.platform == 'darwin': # macOS - subprocess.Popen(['open', fpath]) - else: # linux - try: - subprocess.Popen(['xdg-open', fpath]) - except OSError: - raise OSError('unsupported xdg-open call??') - - -class SyncRepresentationDetailModel(QtCore.QAbstractTableModel): - """ - List of all syncronizable files per single representation. - - Used in detail window accessible after clicking on single repre in the - summary. - - Args: - sync_server (SyncServer) - object to call server operations (update - db status, set site status...) 
- header (list) - names of visible columns - _id (string) - MongoDB _id of representation - project (string) - collection name, all queries must be called on - a specific collection - """ - PAGE_SIZE = 30 - # TODO add filter filename - DEFAULT_SORT = { - "files.path": 1 - } - SORT_BY_COLUMN = [ - "files.path", - "updated_dt_local", # local created_dt - "updated_dt_remote", # remote created_dt - "progress_local", # local progress - "progress_remote", # remote progress - "size", # remote progress - "context.asset", # priority TODO - "status" # state - ] - - @attr.s - class SyncRepresentationDetail: - """ - Auxiliary object for easier handling. - - Fields must contain all header values (+ any arbitrary values). - """ - _id = attr.ib() - file = attr.ib() - created_dt = attr.ib(default=None) - sync_dt = attr.ib(default=None) - local_site = attr.ib(default=None) - remote_site = attr.ib(default=None) - size = attr.ib(default=None) - priority = attr.ib(default=None) - state = attr.ib(default=None) - tries = attr.ib(default=None) - error = attr.ib(default=None) - path = attr.ib(default=None) - - def __init__(self, sync_server, header, _id, project=None): - super(SyncRepresentationDetailModel, self).__init__() - self._header = header - self._data = [] - self._project = project - self._rec_loaded = 0 - self._total_records = 0 # how many documents query actually found - self.filter = None - self._id = _id - self._initialized = False - - self.sync_server = sync_server - # TODO think about admin mode - # this is for regular user, always only single local and single remote - self.local_site = self.sync_server.get_active_site(self._project) - self.remote_site = self.sync_server.get_remote_site(self._project) - - self.sort = self.DEFAULT_SORT - - # in case we would like to hide/show some columns - self.projection = self.get_default_projection() - - self.query = self.get_default_query() - representations = self.dbcon.aggregate(self.query) - self.refresh(representations) - - self.timer = QtCore.QTimer() - self.timer.timeout.connect(self.tick) - self.timer.start(SyncRepresentationModel.REFRESH_SEC) - - @property - def dbcon(self): - return self.sync_server.connection.database[self._project] - - def tick(self): - self.refresh(representations=None, load_records=self._rec_loaded) - self.timer.start(SyncRepresentationModel.REFRESH_SEC) - - def get_header_index(self, value): - """ - Returns index of 'value' in headers - - Args: - value (str): header name value - Returns: - (int) - """ - return self._header.index(value) - - def data(self, index, role): - item = self._data[index.row()] - if role == Qt.DisplayRole: - return attr.asdict(item)[self._header[index.column()]] - if role == Qt.UserRole: - return item._id - - def rowCount(self, index): - return len(self._data) - - def columnCount(self, index): - return len(self._header) - - def headerData(self, section, orientation, role): - if role == Qt.DisplayRole: - if orientation == Qt.Horizontal: - return str(self._header[section]) - - def refresh(self, representations=None, load_records=0): - if self.sync_server.is_paused(): - return - - self.beginResetModel() - self._data = [] - self._rec_loaded = 0 - - if not representations: - self.query = self.get_default_query(load_records) - representations = self.dbcon.aggregate(self.query) - - self._add_page_records(self.local_site, self.remote_site, - representations) - self.endResetModel() - - def _add_page_records(self, local_site, remote_site, representations): - """ - Process all records from 'representation' and add them 
to storage. - - Args: - local_site (str): name of local site (mine) - remote_site (str): name of cloud provider (theirs) - representations (Mongo Cursor) - mimics result set, 1 object - with paginatedResults array and totalCount array - """ - # representations is a Cursor, get first - result = representations.next() - count = 0 - total_count = result.get("totalCount") - if total_count: - count = total_count.pop().get('count') - self._total_records = count - - local_provider = _translate_provider_for_icon(self.sync_server, - self._project, - local_site) - remote_provider = _translate_provider_for_icon(self.sync_server, - self._project, - remote_site) - - for repre in result.get("paginatedResults"): - # log.info("!!! repre:: {}".format(repre)) - files = repre.get("files", []) - if isinstance(files, dict): # aggregate returns dictionary - files = [files] - - for file in files: - local_updated = remote_updated = None - if repre.get('updated_dt_local'): - local_updated = \ - repre.get('updated_dt_local').strftime( - "%Y%m%dT%H%M%SZ") - - if repre.get('updated_dt_remote'): - remote_updated = \ - repre.get('updated_dt_remote').strftime( - "%Y%m%dT%H%M%SZ") - - progress_remote = _convert_progress( - repre.get('progress_remote', '0')) - progress_local = _convert_progress( - repre.get('progress_local', '0')) - - errors = [] - if repre.get('failed_remote_error'): - errors.append(repre.get('failed_remote_error')) - if repre.get('failed_local_error'): - errors.append(repre.get('failed_local_error')) - - item = self.SyncRepresentationDetail( - file.get("_id"), - os.path.basename(file["path"]), - local_updated, - remote_updated, - '{} {}'.format(local_provider, progress_local), - '{} {}'.format(remote_provider, progress_remote), - file.get('size', 0), - 1, - STATUS[repre.get("status", -1)], - repre.get("tries"), - '\n'.join(errors), - file.get('path') - - ) - self._data.append(item) - self._rec_loaded += 1 - - def canFetchMore(self, index): - """ - Check if there are more records than currently loaded - """ - # 'skip' might be suboptimal when representation hits 500k+ - return self._total_records > self._rec_loaded - - def fetchMore(self, index): - """ - Add more record to model. - - Called when 'canFetchMore' returns true, which means there are - more records in DB than loaded. - 'self._buffer' is used to stash cursor to limit requery - """ - log.debug("fetchMore") - items_to_fetch = min(self._total_records - self._rec_loaded, - self.PAGE_SIZE) - self.query = self.get_default_query(self._rec_loaded) - representations = self.dbcon.aggregate(self.query) - self.beginInsertRows(index, - self._rec_loaded, - self._rec_loaded + items_to_fetch - 1) - - self._add_page_records(self.local_site, self.remote_site, - representations) - - self.endInsertRows() - - def sort(self, index, order): - # limit unwanted first re-sorting by view - if index < 0: - return - - self._rec_loaded = 0 # change sort - reset from start - - if order == 0: - order = 1 - else: - order = -1 - - self.sort = {self.SORT_BY_COLUMN[index]: order} - self.query = self.get_default_query() - - representations = self.dbcon.aggregate(self.query) - self.refresh(representations) - - def set_filter(self, filter): - self.filter = filter - self.refresh() - - def get_index(self, id): - """ - Get index of 'id' value. - - Used for keeping selection after refresh. 
- - Args: - id (str): MongoDB _id - Returns: - (QModelIndex) - """ - for i in range(self.rowCount(None)): - index = self.index(i, 0) - value = self.data(index, Qt.UserRole) - if value == id: - return index - return None - - def get_default_query(self, limit=0): - """ - Gets query that gets used when no extra sorting, filtering or - projecting is needed. - - Called for basic table view. - - Returns: - [(dict)] - list with single dict - appropriate for aggregate - function for MongoDB - """ - if limit == 0: - limit = SyncRepresentationModel.PAGE_SIZE - - return [ - {"$match": self._get_match_part()}, - {"$unwind": "$files"}, - {'$addFields': { - 'order_remote': { - '$filter': {'input': '$files.sites', 'as': 'p', - 'cond': {'$eq': ['$$p.name', self.remote_site]} - }}, - 'order_local': { - '$filter': {'input': '$files.sites', 'as': 'p', - 'cond': {'$eq': ['$$p.name', self.local_site]} - }} - }}, - {'$addFields': { - # prepare progress per file, presence of 'created_dt' denotes - # successfully finished load/download - 'progress_remote': {'$first': { - '$cond': [{'$size': "$order_remote.progress"}, - "$order_remote.progress", - {'$cond': [ - {'$size': "$order_remote.created_dt"}, - [1], - [0] - ]} - ]}}, - 'progress_local': {'$first': { - '$cond': [{'$size': "$order_local.progress"}, - "$order_local.progress", - {'$cond': [ - {'$size': "$order_local.created_dt"}, - [1], - [0] - ]} - ]}}, - # file might be successfully created or failed, not both - 'updated_dt_remote': {'$first': { - '$cond': [ - {'$size': "$order_remote.created_dt"}, - "$order_remote.created_dt", - { - '$cond': [ - {'$size': "$order_remote.last_failed_dt"}, - "$order_remote.last_failed_dt", - [] - ] - } - ] - }}, - 'updated_dt_local': {'$first': { - '$cond': [ - {'$size': "$order_local.created_dt"}, - "$order_local.created_dt", - { - '$cond': [ - {'$size': "$order_local.last_failed_dt"}, - "$order_local.last_failed_dt", - [] - ] - } - ] - }}, - 'paused_remote': { - '$cond': [{'$size': "$order_remote.paused"}, - 1, - 0]}, - 'paused_local': { - '$cond': [{'$size': "$order_local.paused"}, - 1, - 0]}, - 'failed_remote': { - '$cond': [{'$size': "$order_remote.last_failed_dt"}, - 1, - 0]}, - 'failed_local': { - '$cond': [{'$size': "$order_local.last_failed_dt"}, - 1, - 0]}, - 'failed_remote_error': {'$first': { - '$cond': [{'$size': "$order_remote.error"}, - "$order_remote.error", - [""]]}}, - 'failed_local_error': {'$first': { - '$cond': [{'$size': "$order_local.error"}, - "$order_local.error", - [""]]}}, - 'tries': {'$first': { - '$cond': [ - {'$size': "$order_local.tries"}, - "$order_local.tries", - {'$cond': [ - {'$size': "$order_remote.tries"}, - "$order_remote.tries", - [] - ]} - ]}} - }}, - {"$project": self.projection}, - {"$sort": self.sort}, - { - '$facet': { - 'paginatedResults': [{'$skip': self._rec_loaded}, - {'$limit': limit}], - 'totalCount': [{'$count': 'count'}] - } - } - ] - - def _get_match_part(self): - """ - Returns different content for 'match' portion if filtering by - name is present - - Returns: - (dict) - """ - if not self.filter: - return { - "type": "representation", - "_id": self._id - } - else: - regex_str = '.*{}.*'.format(self.filter) - return { - "type": "representation", - "_id": self._id, - '$or': [{'files.path': {'$regex': regex_str, '$options': 'i'}}] - } - - def get_default_projection(self): - """ - Projection part for aggregate query. - - All fields with '1' will be returned, no others. 
- - Returns: - (dict) - """ - return { - "files": 1, - 'progress_remote': 1, - 'progress_local': 1, - 'updated_dt_remote': 1, - 'updated_dt_local': 1, - 'paused_remote': 1, - 'paused_local': 1, - 'failed_remote_error': 1, - 'failed_local_error': 1, - 'tries': 1, - 'status': { - '$switch': { - 'branches': [ - { - 'case': { - '$or': ['$paused_remote', '$paused_local']}, - 'then': 3 # Paused - }, - { - 'case': { - '$and': [{'$or': ['$failed_remote', - '$failed_local']}, - {'$eq': ['$tries', 3]}]}, - 'then': 1 # Failed (3 tries) - }, - { - 'case': { - '$or': [{'$eq': ['$progress_remote', 0]}, - {'$eq': ['$progress_local', 0]}]}, - 'then': 2 # Queued - }, - { - 'case': { - '$or': ['$failed_remote', '$failed_local']}, - 'then': 1 # Failed - }, - { - 'case': {'$or': [{'$and': [ - {'$gt': ['$progress_remote', 0]}, - {'$lt': ['$progress_remote', 1]} - ]}, - {'$and': [ - {'$gt': ['$progress_local', 0]}, - {'$lt': ['$progress_local', 1]} - ]} - ]}, - 'then': 0 # In Progress - }, - { - 'case': {'$and': [ - {'$eq': ['$progress_remote', 1]}, - {'$eq': ['$progress_local', 1]} - ]}, - 'then': 4 # Synced OK - }, - ], - 'default': -1 - } - }, - 'data.path': 1 - } - - -class ImageDelegate(QtWidgets.QStyledItemDelegate): - """ - Prints icon of site and progress of synchronization - """ - - def __init__(self, parent=None): - super(ImageDelegate, self).__init__(parent) - self.icons = {} - - def paint(self, painter, option, index): - option = QtWidgets.QStyleOptionViewItem(option) - option.showDecorationSelected = True - - if (option.showDecorationSelected and - (option.state & QtWidgets.QStyle.State_Selected)): - painter.setOpacity(0.20) # highlight color is a bit off - painter.fillRect(option.rect, - option.palette.highlight()) - painter.setOpacity(1) - - d = index.data(QtCore.Qt.DisplayRole) - if d: - provider, value = d.split() - else: - return - - if not self.icons.get(provider): - resource_path = os.path.dirname(__file__) - resource_path = os.path.join(resource_path, "..", - "providers", "resources") - pix_url = "{}/{}.png".format(resource_path, provider) - pixmap = QtGui.QPixmap(pix_url) - self.icons[provider] = pixmap - else: - pixmap = self.icons[provider] - - point = QtCore.QPoint(option.rect.x() + - (option.rect.width() - pixmap.width()) / 2, - option.rect.y() + - (option.rect.height() - pixmap.height()) / 2) - painter.drawPixmap(point, pixmap) - - painter.setOpacity(0.5) - overlay_rect = option.rect - overlay_rect.setHeight(overlay_rect.height() * (1.0 - float(value))) - painter.fillRect(overlay_rect, - QtGui.QBrush(QtGui.QColor(0, 0, 0, 200))) - painter.setOpacity(1) - - -class SyncRepresentationErrorWindow(QtWidgets.QDialog): - def __init__(self, _id, project, dt, tries, msg, parent=None): - super(SyncRepresentationErrorWindow, self).__init__(parent) - self.setWindowFlags(QtCore.Qt.Window) - self.setFocusPolicy(QtCore.Qt.StrongFocus) - - self.setStyleSheet(style.load_stylesheet()) - self.setWindowIcon(QtGui.QIcon(style.app_icon_path())) - self.resize(250, 200) - - body = QtWidgets.QWidget() - footer = QtWidgets.QWidget() - footer.setFixedHeight(20) - - container = SyncRepresentationErrorWidget(_id, project, dt, tries, msg, - parent=self) - body_layout = QtWidgets.QHBoxLayout(body) - body_layout.addWidget(container) - body_layout.setContentsMargins(0, 0, 0, 0) - - message = QtWidgets.QLabel() - message.hide() - - footer_layout = QtWidgets.QVBoxLayout(footer) - footer_layout.addWidget(message) - footer_layout.setContentsMargins(0, 0, 0, 0) - - layout = QtWidgets.QVBoxLayout(self) - 
layout.addWidget(body) - layout.addWidget(footer) - - self.setLayout(body_layout) - self.setWindowTitle("Sync Representation Error Detail") - - -class SyncRepresentationErrorWidget(QtWidgets.QWidget): - """ - Dialog to show when sync error happened, prints error message - """ - - def __init__(self, _id, project, dt, tries, msg, parent=None): - super(SyncRepresentationErrorWidget, self).__init__(parent) - - layout = QtWidgets.QFormLayout(self) - layout.addRow(QtWidgets.QLabel("Last update date"), - QtWidgets.QLabel(pretty_timestamp(dt))) - layout.addRow(QtWidgets.QLabel("Retries"), - QtWidgets.QLabel(str(tries))) - layout.addRow(QtWidgets.QLabel("Error message"), - QtWidgets.QLabel(msg)) - - -class SizeDelegate(QtWidgets.QStyledItemDelegate): - """ - Pretty print for file size - """ - - def __init__(self, parent=None): - super(SizeDelegate, self).__init__(parent) - - def displayText(self, value, locale): - if value is None: - # Ignore None value - return - - return self._pretty_size(value) - - def _pretty_size(self, value, suffix='B'): - for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']: - if abs(value) < 1024.0: - return "%3.1f%s%s" % (value, unit, suffix) - value /= 1024.0 - return "%.1f%s%s" % (value, 'Yi', suffix) - - -def _convert_progress(value): - try: - progress = float(value) - except (ValueError, TypeError): - progress = 0.0 - - return progress - - -def _translate_provider_for_icon(sync_server, project, site): - """ - Get provider for 'site' - - This is used for getting icon, 'studio' should have different icon - then local sites, even the provider 'local_drive' is same - - """ - if site == sync_server.DEFAULT_SITE: - return sync_server.DEFAULT_SITE - return sync_server.get_provider_for_site(project, site) diff --git a/openpype/modules/sync_server/tray/lib.py b/openpype/modules/sync_server/tray/lib.py new file mode 100644 index 0000000000..04bd1f568e --- /dev/null +++ b/openpype/modules/sync_server/tray/lib.py @@ -0,0 +1,166 @@ +from Qt import QtCore +import attr +import abc +import six + +from openpype.lib import PypeLogger + + +log = PypeLogger().get_logger("SyncServer") + +STATUS = { + 0: 'In Progress', + 1: 'Queued', + 2: 'Failed', + 3: 'Paused', + 4: 'Synced OK', + -1: 'Not available' +} + +DUMMY_PROJECT = "No project configured" + +ProviderRole = QtCore.Qt.UserRole + 2 +ProgressRole = QtCore.Qt.UserRole + 4 +DateRole = QtCore.Qt.UserRole + 6 +FailedRole = QtCore.Qt.UserRole + 8 +HeaderNameRole = QtCore.Qt.UserRole + 10 +FullItemRole = QtCore.Qt.UserRole + 12 + + +@six.add_metaclass(abc.ABCMeta) +class AbstractColumnFilter: + + def __init__(self, column_name, dbcon=None): + self.column_name = column_name + self.dbcon = dbcon + self._search_variants = [] + + def search_variants(self): + """ + Returns all flavors of search available for this column, + """ + return self._search_variants + + @abc.abstractmethod + def values(self): + """ + Returns dict of available values for filter {'label':'value'} + """ + pass + + @abc.abstractmethod + def prepare_match_part(self, values): + """ + Prepares format valid for $match part from 'values + + Args: + values (dict): {'label': 'value'} + Returns: + (dict): {'COLUMN_NAME': {'$in': ['val1', 'val2']}} + """ + pass + + +class PredefinedSetFilter(AbstractColumnFilter): + + def __init__(self, column_name, values): + super().__init__(column_name) + self._search_variants = ['checkbox'] + self._values = values + if self._values and \ + list(self._values.keys())[0] == list(self._values.values())[0]: + self._search_variants.append('text') 
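+
+    # Illustrative use (a sketch; 'STATUS' is the mapping defined above):
+    #     f = PredefinedSetFilter('status', STATUS)
+    #     f.search_variants()  # -> ['checkbox'] - labels differ from keys,
+    #                          # so the 'text' variant is not added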
+
+    def values(self):
+        return {k: v for k, v in self._values.items()}
+
+    def prepare_match_part(self, values):
+        return {'$in': list(values.keys())}
+
+
+class RegexTextFilter(AbstractColumnFilter):
+
+    def __init__(self, column_name):
+        super().__init__(column_name)
+        self._search_variants = ['text']
+
+    def values(self):
+        return {}
+
+    def prepare_match_part(self, values):
+        """ values = {'text1 text2': 'text1 text2'} """
+        if not values:
+            return {}
+
+        regex_strs = set()
+        text = list(values.keys())[0]  # only single key always expected
+        for word in text.split():
+            regex_strs.add('.*{}.*'.format(word))
+
+        return {"$regex": "|".join(regex_strs),
+                "$options": 'i'}
+
+
+class MultiSelectFilter(AbstractColumnFilter):
+
+    def __init__(self, column_name, values=None, dbcon=None):
+        super().__init__(column_name)
+        self._values = values
+        self.dbcon = dbcon
+        self._search_variants = ['checkbox']
+
+    def values(self):
+        if self._values:
+            return {k: v for k, v in self._values.items()}
+
+        recs = self.dbcon.find({'type': self.column_name}, {"name": 1,
+                                                            "_id": 0})
+        values = {}
+        for item in recs:
+            values[item["name"]] = item["name"]
+        return dict(sorted(values.items(), key=lambda it: it[1]))
+
+    def prepare_match_part(self, values):
+        return {'$in': list(values.keys())}
+
+
+@attr.s
+class FilterDefinition:
+    type = attr.ib()
+    values = attr.ib(factory=list)
+
+
+def pretty_size(value, suffix='B'):
+    for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
+        if abs(value) < 1024.0:
+            return "%3.1f%s%s" % (value, unit, suffix)
+        value /= 1024.0
+    return "%.1f%s%s" % (value, 'Yi', suffix)
+
+
+def convert_progress(value):
+    try:
+        progress = float(value)
+    except (ValueError, TypeError):
+        progress = 0.0
+
+    return progress
+
+
+def translate_provider_for_icon(sync_server, project, site):
+    """
+    Get provider for 'site'
+
+    This is used for getting the icon; 'studio' should have a different
+    icon than local sites, even if the provider 'local_drive' is the same.
+
+    """
+    if site == sync_server.DEFAULT_SITE:
+        return sync_server.DEFAULT_SITE
+    return sync_server.get_provider_for_site(project, site)
+
+
+def get_item_by_id(model, object_id):
+    index = model.get_index(object_id)
+    item = model.data(index, FullItemRole)
+    return item
diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py
new file mode 100644
index 0000000000..8fdd9487a4
--- /dev/null
+++ b/openpype/modules/sync_server/tray/models.py
@@ -0,0 +1,1263 @@
+import os
+import attr
+from bson.objectid import ObjectId
+
+from Qt import QtCore
+from Qt.QtCore import Qt
+
+from avalon.tools.delegates import pretty_timestamp
+
+from openpype.lib import PypeLogger
+
+from openpype.modules.sync_server.tray import lib
+
+
+log = PypeLogger().get_logger("SyncServer")
+
+
+class ProjectModel(QtCore.QAbstractListModel):
+    def __init__(self, *args, projects=None, **kwargs):
+        super(ProjectModel, self).__init__(*args, **kwargs)
+        self.projects = projects or []
+
+    def data(self, index, role):
+        if role == Qt.DisplayRole:
+            # 'self.projects' holds (status, text) pairs
+            status, text = self.projects[index.row()]
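+            # 'status' is currently unused; only the project name is shown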
+            return text
+
+    def rowCount(self, _index):
+        return len(self.projects)
+
+    def columnCount(self, _index):
+        # list model exposes a single column
+        return 1
+
+
+class _SyncRepresentationModel(QtCore.QAbstractTableModel):
+
+    COLUMN_LABELS = []
+
+    PAGE_SIZE = 20  # default page size to query for
+    REFRESH_SEC = 5000  # in milliseconds, requery DB for new status
+
+    @property
+    def dbcon(self):
+        """
+        Database object with preselected project (collection) to run DB
+        operations (find, aggregate).
+
+        All queries should go through this (because of collection).
+        """
+        return self.sync_server.connection.database[self.project]
+
+    @property
+    def project(self):
+        """Returns project"""
+        return self._project
+
+    @property
+    def column_filtering(self):
+        return self._column_filtering
+
+    def rowCount(self, _index):
+        return len(self._data)
+
+    def columnCount(self, _index=None):
+        return len(self._header)
+
+    def headerData(self, section, orientation, role=Qt.DisplayRole):
+        if section >= len(self.COLUMN_LABELS):
+            return
+
+        if role == Qt.DisplayRole:
+            if orientation == Qt.Horizontal:
+                return self.COLUMN_LABELS[section][1]
+
+        if role == lib.HeaderNameRole:
+            if orientation == Qt.Horizontal:
+                return self.COLUMN_LABELS[section][0]  # return name
+
+    def get_column(self, index):
+        return self.COLUMN_LABELS[index]
+
+    def get_header_index(self, value):
+        """
+        Returns index of 'value' in headers
+
+        Args:
+            value (str): header name value
+        Returns:
+            (int)
+        """
+        return self._header.index(value)
+
+    def refresh(self, representations=None, load_records=0):
+        """
+        Reloads representations from DB if necessary, adds them to model.
+
+        Runs periodically (every X seconds) or on demand (change of
+        sorting, filtering etc.)
+
+        Emits 'modelReset' signal.
+
+        Args:
+            representations (PaginationResult object): pass result of
+                aggregate query from outside - mostly for testing only
+            load_records (int) - enforces how many records should be
+                actually queried (scrolled a couple of times to list more
+                than single page of records)
+        """
+        if self.sync_server.is_paused() or \
+                self.sync_server.is_project_paused(self.project):
+            return
+        self.refresh_started.emit()
+        self.beginResetModel()
+        self._data = []
+        self._rec_loaded = 0
+
+        if not representations:
+            self.query = self.get_query(load_records)
+            representations = self.dbcon.aggregate(self.query)
+
+        self.add_page_records(self.active_site, self.remote_site,
+                              representations)
+        self.endResetModel()
+        self.refresh_finished.emit()
+
+    def tick(self):
+        """
+        Triggers refresh of model.
+
+        Because of pagination, prepared (sorting, filtering) query needs
+        to be run on DB every X seconds.
+        """
+        self.refresh(representations=None, load_records=self._rec_loaded)
+        self.timer.start(self.REFRESH_SEC)
+
+    def canFetchMore(self, _index):
+        """
+        Check if there are more records than currently loaded
+        """
+        # 'skip' might be suboptimal when representation hits 500k+
+        return self._total_records > self._rec_loaded
+
+    def fetchMore(self, index):
+        """
+        Add more records to the model.
+
+        Called when 'canFetchMore' returns true, which means there are
+        more records in DB than loaded.
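+
+        Qt calls this lazily while the view is scrolled; at most
+        'PAGE_SIZE' records are queried per call.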
+ """ + log.debug("fetchMore") + items_to_fetch = min(self._total_records - self._rec_loaded, + self.PAGE_SIZE) + self.query = self.get_query(self._rec_loaded) + representations = self.dbcon.aggregate(self.query) + self.beginInsertRows(index, + self._rec_loaded, + self._rec_loaded + items_to_fetch - 1) + + self.add_page_records(self.active_site, self.remote_site, + representations) + + self.endInsertRows() + + def sort(self, index, order): + """ + Summary sort per representation. + + Sort is happening on a DB side, model is reset, db queried + again. + + It remembers one last sort, adds it as secondary after new sort. + + Args: + index (int): column index + order (int): 0| + """ + # limit unwanted first re-sorting by view + if index < 0: + return + + self._rec_loaded = 0 + if order == 0: + order = 1 + else: + order = -1 + + backup_sort = dict(self.sort) + + self.sort = {self.SORT_BY_COLUMN[index]: order} # reset + # add last one + for key, val in backup_sort.items(): + if key != '_id': + self.sort[key] = val + break + # add default one + self.sort['_id'] = 1 + + self.query = self.get_query() + # import json + # log.debug(json.dumps(self.query, indent=4).\ + # replace('False', 'false').\ + # replace('True', 'true').replace('None', 'null')) + + representations = self.dbcon.aggregate(self.query) + self.refresh(representations) + + def set_word_filter(self, word_filter): + """ + Adds text value filtering + + Args: + word_filter (str): string inputted by user + """ + self._word_filter = word_filter + self.refresh() + + def get_filters(self): + """ + Returns all available filter editors per column_name keys. + """ + filters = {} + for column_name, _ in self.COLUMN_LABELS: + filter_rec = self.COLUMN_FILTERS.get(column_name) + if filter_rec: + filter_rec.dbcon = self.dbcon + filters[column_name] = filter_rec + + return filters + + def get_column_filter(self, index): + """ + Returns filter object for column 'index + + Args: + index(int): index of column in header + + Returns: + (AbstractColumnFilter) + """ + column_name = self._header[index] + + filter_rec = self.COLUMN_FILTERS.get(column_name) + if filter_rec: + filter_rec.dbcon = self.dbcon # up-to-date db connection + + return filter_rec + + def set_column_filtering(self, checked_values): + """ + Sets dictionary used in '$match' part of MongoDB aggregate + + Args: + checked_values(dict): key:values ({'status':{1:"Foo",3:"Bar"}} + + Modifies: + self._column_filtering : {'status': {'$in': [1, 2, 3]}} + """ + filtering = {} + for column_name, dict_value in checked_values.items(): + column_f = self.COLUMN_FILTERS.get(column_name) + if not column_f: + continue + column_f.dbcon = self.dbcon + filtering[column_name] = column_f.prepare_match_part(dict_value) + + self._column_filtering = filtering + + def get_column_filter_values(self, index): + """ + Returns list of available values for filtering in the column + + Args: + index(int): index of column in header + + Returns: + (dict) of value: label shown in filtering menu + 'value' is used in MongoDB query, 'label' is human readable for + menu + for some columns ('subset') might be 'value' and 'label' same + """ + filter_rec = self.get_column_filter(index) + if not filter_rec: + return {} + + return filter_rec.values() + + def set_project(self, project): + """ + Changes project, called after project selection is changed + + Args: + project (str): name of project + """ + self._project = project + self.sync_server.set_sync_project_settings() + self.active_site = 
self.sync_server.get_active_site(self.project) + self.remote_site = self.sync_server.get_remote_site(self.project) + self.refresh() + + def get_index(self, id): + """ + Get index of 'id' value. + + Used for keeping selection after refresh. + + Args: + id (str): MongoDB _id + Returns: + (QModelIndex) + """ + for i in range(self.rowCount(None)): + index = self.index(i, 0) + value = self.data(index, Qt.UserRole) + if value == id: + return index + return None + + +class SyncRepresentationSummaryModel(_SyncRepresentationModel): + """ + Model for summary of representations. + + Groups files information per representation. Allows sorting and + full text filtering. + + Allows pagination, most of heavy lifting is being done on DB side. + Single model matches to single collection. When project is changed, + model is reset and refreshed. + + Args: + sync_server (SyncServer) - object to call server operations (update + db status, set site status...) + header (list) - names of visible columns + project (string) - collection name, all queries must be called on + a specific collection + + """ + COLUMN_LABELS = [ + ("asset", "Asset"), + ("subset", "Subset"), + ("version", "Version"), + ("representation", "Representation"), + ("local_site", "Active site"), + ("remote_site", "Remote site"), + ("files_count", "Files"), + ("files_size", "Size"), + ("priority", "Priority"), + ("status", "Status") + ] + + DEFAULT_SORT = { + "updated_dt_remote": -1, + "_id": 1 + } + SORT_BY_COLUMN = [ + "asset", # asset + "subset", # subset + "version", # version + "representation", # representation + "updated_dt_local", # local created_dt + "updated_dt_remote", # remote created_dt + "files_count", # count of files + "files_size", # file size of all files + "context.asset", # priority TODO + "status" # status + ] + + COLUMN_FILTERS = { + 'status': lib.PredefinedSetFilter('status', lib.STATUS), + 'subset': lib.RegexTextFilter('subset'), + 'asset': lib.RegexTextFilter('asset'), + 'representation': lib.MultiSelectFilter('representation') + } + + refresh_started = QtCore.Signal() + refresh_finished = QtCore.Signal() + + @attr.s + class SyncRepresentation: + """ + Auxiliary object for easier handling. + + Fields must contain all header values (+ any arbitrary values). 
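+
+        'local_site'/'remote_site' keep site names, while the matching
+        '*_provider' and '*_progress' fields feed the icon delegates
+        (see 'data()' roles below).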
+ """ + _id = attr.ib() + asset = attr.ib() + subset = attr.ib() + version = attr.ib() + representation = attr.ib() + created_dt = attr.ib(default=None) + sync_dt = attr.ib(default=None) + local_site = attr.ib(default=None) + remote_site = attr.ib(default=None) + local_provider = attr.ib(default=None) + remote_provider = attr.ib(default=None) + local_progress = attr.ib(default=None) + remote_progress = attr.ib(default=None) + files_count = attr.ib(default=None) + files_size = attr.ib(default=None) + priority = attr.ib(default=None) + status = attr.ib(default=None) + path = attr.ib(default=None) + + def __init__(self, sync_server, header, project=None): + super(SyncRepresentationSummaryModel, self).__init__() + self._header = header + self._data = [] + self._project = project + self._rec_loaded = 0 + self._total_records = 0 # how many documents query actually found + self._word_filter = None + self._column_filtering = {} + + self._word_filter = None + + self._initialized = False + if not self._project or self._project == lib.DUMMY_PROJECT: + return + + self.sync_server = sync_server + # TODO think about admin mode + # this is for regular user, always only single local and single remote + self.active_site = self.sync_server.get_active_site(self.project) + self.remote_site = self.sync_server.get_remote_site(self.project) + + self.sort = self.DEFAULT_SORT + + self.query = self.get_query() + self.default_query = list(self.get_query()) + + representations = self.dbcon.aggregate(self.query) + self.refresh(representations) + + self.timer = QtCore.QTimer() + self.timer.timeout.connect(self.tick) + self.timer.start(self.REFRESH_SEC) + + def data(self, index, role): + item = self._data[index.row()] + + if role == lib.FullItemRole: + return item + + header_value = self._header[index.column()] + if role == lib.ProviderRole: + if header_value == 'local_site': + return item.local_provider + if header_value == 'remote_site': + return item.remote_provider + + if role == lib.ProgressRole: + if header_value == 'local_site': + return item.local_progress + if header_value == 'remote_site': + return item.remote_progress + + if role == lib.DateRole: + if header_value == 'local_site': + if item.created_dt: + return pretty_timestamp(item.created_dt) + if header_value == 'remote_site': + if item.sync_dt: + return pretty_timestamp(item.sync_dt) + + if role == lib.FailedRole: + if header_value == 'local_site': + return item.status == lib.STATUS[2] and \ + item.local_progress < 1 + if header_value == 'remote_site': + return item.status == lib.STATUS[2] and \ + item.remote_progress < 1 + + if role == Qt.DisplayRole: + # because of ImageDelegate + if header_value in ['remote_site', 'local_site']: + return "" + + return attr.asdict(item)[self._header[index.column()]] + if role == Qt.UserRole: + return item._id + + def add_page_records(self, local_site, remote_site, representations): + """ + Process all records from 'representation' and add them to storage. 
+
+        Args:
+            local_site (str): name of local site (mine)
+            remote_site (str): name of cloud provider (theirs)
+            representations (Mongo Cursor) - mimics result set, 1 object
+                with paginatedResults array and totalCount array
+        """
+        result = representations.next()
+        count = 0
+        total_count = result.get("totalCount")
+        if total_count:
+            count = total_count.pop().get('count')
+        self._total_records = count
+
+        local_provider = lib.translate_provider_for_icon(self.sync_server,
+                                                         self.project,
+                                                         local_site)
+        remote_provider = lib.translate_provider_for_icon(self.sync_server,
+                                                          self.project,
+                                                          remote_site)
+
+        for repre in result.get("paginatedResults"):
+            files = repre.get("files", [])
+            if isinstance(files, dict):  # aggregate returns dictionary
+                files = [files]
+
+            # representation without files doesn't concern us
+            if not files:
+                continue
+
+            local_updated = remote_updated = None
+            if repre.get('updated_dt_local'):
+                local_updated = \
+                    repre.get('updated_dt_local').strftime("%Y%m%dT%H%M%SZ")
+
+            if repre.get('updated_dt_remote'):
+                remote_updated = \
+                    repre.get('updated_dt_remote').strftime("%Y%m%dT%H%M%SZ")
+
+            avg_progress_remote = lib.convert_progress(
+                repre.get('avg_progress_remote', '0'))
+            avg_progress_local = lib.convert_progress(
+                repre.get('avg_progress_local', '0'))
+
+            if repre.get("version"):
+                version = "v{:0>3d}".format(repre.get("version"))
+            else:
+                version = "master"
+
+            item = self.SyncRepresentation(
+                repre.get("_id"),
+                repre.get("asset"),
+                repre.get("subset"),
+                version,
+                repre.get("representation"),
+                local_updated,
+                remote_updated,
+                local_site,
+                remote_site,
+                local_provider,
+                remote_provider,
+                avg_progress_local,
+                avg_progress_remote,
+                repre.get("files_count", 1),
+                lib.pretty_size(repre.get("files_size", 0)),
+                1,
+                lib.STATUS[repre.get("status", -1)],
+                files[0].get('path')
+            )
+
+            self._data.append(item)
+            self._rec_loaded += 1
+
+    def get_query(self, limit=0):
+        """
+        Returns basic aggregate query for main table.
+
+        Main table provides summary information about representation,
+        which could have multiple files. Details are accessible after
+        double click on representation row.
+        Columns:
+            'created_dt' - max of created or updated (when failed) per repr
+            'sync_dt' - same for remote side
+            'local_site' - progress of repr on local side, 1 = finished
+            'remote_site' - progress on remote side, calculated from files
+            'status' -
+                0 - in progress
+                1 - queued
+                2 - failed
+                3 - paused
+                4 - finished on both sides
+
+            All these fields must be computed in DB to keep pagination
+            consistent.
+
+        Args:
+            limit (int): how many records should be returned, by default
+                it 'PAGE_SIZE' for performance.
+ Should be overridden by value of loaded records for refresh + functionality (got more records by scrolling, refresh + shouldn't reset that) + """ + if limit == 0: + limit = SyncRepresentationSummaryModel.PAGE_SIZE + + aggr = [ + {"$match": self.get_match_part()}, + {'$unwind': '$files'}, + # merge potentially unwinded records back to single per repre + {'$addFields': { + 'order_remote': { + '$filter': {'input': '$files.sites', 'as': 'p', + 'cond': {'$eq': ['$$p.name', self.remote_site]} + }}, + 'order_local': { + '$filter': {'input': '$files.sites', 'as': 'p', + 'cond': {'$eq': ['$$p.name', self.active_site]} + }} + }}, + {'$addFields': { + # prepare progress per file, presence of 'created_dt' denotes + # successfully finished load/download + 'progress_remote': {'$first': { + '$cond': [{'$size': "$order_remote.progress"}, + "$order_remote.progress", + {'$cond': [ + {'$size': "$order_remote.created_dt"}, + [1], + [0] + ]} + ]}}, + 'progress_local': {'$first': { + '$cond': [{'$size': "$order_local.progress"}, + "$order_local.progress", + {'$cond': [ + {'$size': "$order_local.created_dt"}, + [1], + [0] + ]} + ]}}, + # file might be successfully created or failed, not both + 'updated_dt_remote': {'$first': { + '$cond': [{'$size': "$order_remote.created_dt"}, + "$order_remote.created_dt", + {'$cond': [ + {'$size': "$order_remote.last_failed_dt"}, + "$order_remote.last_failed_dt", + [] + ]} + ]}}, + 'updated_dt_local': {'$first': { + '$cond': [{'$size': "$order_local.created_dt"}, + "$order_local.created_dt", + {'$cond': [ + {'$size': "$order_local.last_failed_dt"}, + "$order_local.last_failed_dt", + [] + ]} + ]}}, + 'files_size': {'$ifNull': ["$files.size", 0]}, + 'failed_remote': { + '$cond': [{'$size': "$order_remote.last_failed_dt"}, + 1, + 0]}, + 'failed_local': { + '$cond': [{'$size': "$order_local.last_failed_dt"}, + 1, + 0]}, + 'failed_local_tries': { + '$cond': [{'$size': '$order_local.tries'}, + {'$first': '$order_local.tries'}, + 0]}, + 'failed_remote_tries': { + '$cond': [{'$size': '$order_remote.tries'}, + {'$first': '$order_remote.tries'}, + 0]}, + 'paused_remote': { + '$cond': [{'$size': "$order_remote.paused"}, + 1, + 0]}, + 'paused_local': { + '$cond': [{'$size': "$order_local.paused"}, + 1, + 0]}, + }}, + {'$group': { + '_id': '$_id', + # pass through context - same for representation + 'context': {'$addToSet': '$context'}, + 'data': {'$addToSet': '$data'}, + # pass through files as a list + 'files': {'$addToSet': '$files'}, + # count how many files + 'files_count': {'$sum': 1}, + 'files_size': {'$sum': '$files_size'}, + # sum avg progress, finished = 1 + 'avg_progress_remote': {'$avg': "$progress_remote"}, + 'avg_progress_local': {'$avg': "$progress_local"}, + # select last touch of file + 'updated_dt_remote': {'$max': "$updated_dt_remote"}, + 'failed_remote': {'$sum': '$failed_remote'}, + 'failed_local': {'$sum': '$failed_local'}, + 'failed_remote_tries': {'$sum': '$failed_remote_tries'}, + 'failed_local_tries': {'$sum': '$failed_local_tries'}, + 'paused_remote': {'$sum': '$paused_remote'}, + 'paused_local': {'$sum': '$paused_local'}, + 'updated_dt_local': {'$max': "$updated_dt_local"} + }}, + {"$project": self.projection} + ] + + if self.column_filtering: + aggr.append( + {"$match": self.column_filtering} + ) + + aggr.extend( + [{"$sort": self.sort}, + { + '$facet': { + 'paginatedResults': [{'$skip': self._rec_loaded}, + {'$limit': limit}], + 'totalCount': [{'$count': 'count'}] + } + }] + ) + + return aggr + + def get_match_part(self): + """ + Extend match part with 
word_filter if present. + + Filter is set by user input. Each model has different fields to be + checked. + If performance issues are found, '$text' and text indexes should + be investigated. + + Fulltext searches in: + context.subset + context.asset + context.representation names AND _id (ObjectId) + """ + base_match = { + "type": "representation", + 'files.sites.name': {'$all': [self.active_site, + self.remote_site]} + } + if not self._word_filter: + return base_match + else: + regex_str = '.*{}.*'.format(self._word_filter) + base_match['$or'] = [ + {'context.subset': {'$regex': regex_str, '$options': 'i'}}, + {'context.asset': {'$regex': regex_str, '$options': 'i'}}, + {'context.representation': {'$regex': regex_str, + '$options': 'i'}}] + + if ObjectId.is_valid(self._word_filter): + base_match['$or'] = [{'_id': ObjectId(self._word_filter)}] + + return base_match + + @property + def projection(self): + """ + Projection part for aggregate query. + + All fields with '1' will be returned, no others. + + Returns: + (dict) + """ + return { + "subset": {"$first": "$context.subset"}, + "asset": {"$first": "$context.asset"}, + "version": {"$first": "$context.version"}, + "representation": {"$first": "$context.representation"}, + "data.path": 1, + "files": 1, + 'files_count': 1, + "files_size": 1, + 'avg_progress_remote': 1, + 'avg_progress_local': 1, + 'updated_dt_remote': 1, + 'updated_dt_local': 1, + 'paused_remote': 1, + 'paused_local': 1, + 'status': { + '$switch': { + 'branches': [ + { + 'case': { + '$or': ['$paused_remote', '$paused_local']}, + 'then': 3 # Paused + }, + { + 'case': { + '$or': [ + {'$gte': ['$failed_local_tries', 3]}, + {'$gte': ['$failed_remote_tries', 3]} + ]}, + 'then': 2}, # Failed + { + 'case': { + '$or': [{'$eq': ['$avg_progress_remote', 0]}, + {'$eq': ['$avg_progress_local', 0]}]}, + 'then': 1 # Queued + }, + { + 'case': {'$or': [{'$and': [ + {'$gt': ['$avg_progress_remote', 0]}, + {'$lt': ['$avg_progress_remote', 1]} + ]}, + {'$and': [ + {'$gt': ['$avg_progress_local', 0]}, + {'$lt': ['$avg_progress_local', 1]} + ]} + ]}, + 'then': 0 # In progress + }, + { + 'case': {'$and': [ + {'$eq': ['$avg_progress_remote', 1]}, + {'$eq': ['$avg_progress_local', 1]} + ]}, + 'then': 4 # Synced OK + }, + ], + 'default': -1 + } + } + } + + +class SyncRepresentationDetailModel(_SyncRepresentationModel): + """ + List of all syncronizable files per single representation. + + Used in detail window accessible after clicking on single repre in the + summary. + + Args: + sync_server (SyncServer) - object to call server operations (update + db status, set site status...) + header (list) - names of visible columns + _id (string) - MongoDB _id of representation + project (string) - collection name, all queries must be called on + a specific collection + """ + COLUMN_LABELS = [ + ("file", "File name"), + ("local_site", "Active site"), + ("remote_site", "Remote site"), + ("files_size", "Size"), + ("priority", "Priority"), + ("status", "Status") + ] + + PAGE_SIZE = 30 + DEFAULT_SORT = { + "files.path": 1 + } + SORT_BY_COLUMN = [ + "files.path", + "updated_dt_local", # local created_dt + "updated_dt_remote", # remote created_dt + "size", # remote progress + "size", # priority TODO + "status" # status + ] + + COLUMN_FILTERS = { + 'status': lib.PredefinedSetFilter('status', lib.STATUS), + 'file': lib.RegexTextFilter('file'), + } + + refresh_started = QtCore.Signal() + refresh_finished = QtCore.Signal() + + @attr.s + class SyncRepresentationDetail: + """ + Auxiliary object for easier handling. 
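+        Represents a single synchronized file of a representation.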
+ + Fields must contain all header values (+ any arbitrary values). + """ + _id = attr.ib() + file = attr.ib() + created_dt = attr.ib(default=None) + sync_dt = attr.ib(default=None) + local_site = attr.ib(default=None) + remote_site = attr.ib(default=None) + local_provider = attr.ib(default=None) + remote_provider = attr.ib(default=None) + local_progress = attr.ib(default=None) + remote_progress = attr.ib(default=None) + size = attr.ib(default=None) + priority = attr.ib(default=None) + status = attr.ib(default=None) + tries = attr.ib(default=None) + error = attr.ib(default=None) + path = attr.ib(default=None) + + def __init__(self, sync_server, header, _id, + project=None): + super(SyncRepresentationDetailModel, self).__init__() + self._header = header + self._data = [] + self._project = project + self._rec_loaded = 0 + self._total_records = 0 # how many documents query actually found + self._word_filter = None + self._id = _id + self._initialized = False + self._column_filtering = {} + + self.sync_server = sync_server + # TODO think about admin mode + # this is for regular user, always only single local and single remote + self.active_site = self.sync_server.get_active_site(self.project) + self.remote_site = self.sync_server.get_remote_site(self.project) + + self.sort = self.DEFAULT_SORT + + self.query = self.get_query() + representations = self.dbcon.aggregate(self.query) + self.refresh(representations) + + self.timer = QtCore.QTimer() + self.timer.timeout.connect(self.tick) + self.timer.start(SyncRepresentationSummaryModel.REFRESH_SEC) + + def data(self, index, role): + item = self._data[index.row()] + + if role == lib.FullItemRole: + return item + + header_value = self._header[index.column()] + if role == lib.ProviderRole: + if header_value == 'local_site': + return item.local_provider + if header_value == 'remote_site': + return item.remote_provider + + if role == lib.ProgressRole: + if header_value == 'local_site': + return item.local_progress + if header_value == 'remote_site': + return item.remote_progress + + if role == lib.DateRole: + if header_value == 'local_site': + if item.created_dt: + return pretty_timestamp(item.created_dt) + if header_value == 'remote_site': + if item.sync_dt: + return pretty_timestamp(item.sync_dt) + + if role == lib.FailedRole: + if header_value == 'local_site': + return item.status == lib.STATUS[2] and \ + item.local_progress < 1 + if header_value == 'remote_site': + return item.status == lib.STATUS[2] and \ + item.remote_progress < 1 + + if role == Qt.DisplayRole: + # because of ImageDelegate + if header_value in ['remote_site', 'local_site']: + return "" + return attr.asdict(item)[self._header[index.column()]] + if role == Qt.UserRole: + return item._id + + def add_page_records(self, local_site, remote_site, representations): + """ + Process all records from 'representation' and add them to storage. 
+ + Args: + local_site (str): name of local site (mine) + remote_site (str): name of cloud provider (theirs) + representations (Mongo Cursor) - mimics result set, 1 object + with paginatedResults array and totalCount array + """ + # representations is a Cursor, get first + result = representations.next() + count = 0 + total_count = result.get("totalCount") + if total_count: + count = total_count.pop().get('count') + self._total_records = count + + local_provider = lib.translate_provider_for_icon(self.sync_server, + self.project, + local_site) + remote_provider = lib.translate_provider_for_icon(self.sync_server, + self.project, + remote_site) + + for repre in result.get("paginatedResults"): + # log.info("!!! repre:: {}".format(repre)) + files = repre.get("files", []) + if isinstance(files, dict): # aggregate returns dictionary + files = [files] + + for file in files: + local_updated = remote_updated = None + if repre.get('updated_dt_local'): + local_updated = \ + repre.get('updated_dt_local').strftime( + "%Y%m%dT%H%M%SZ") + + if repre.get('updated_dt_remote'): + remote_updated = \ + repre.get('updated_dt_remote').strftime( + "%Y%m%dT%H%M%SZ") + + remote_progress = lib.convert_progress( + repre.get('progress_remote', '0')) + local_progress = lib.convert_progress( + repre.get('progress_local', '0')) + + errors = [] + if repre.get('failed_remote_error'): + errors.append(repre.get('failed_remote_error')) + if repre.get('failed_local_error'): + errors.append(repre.get('failed_local_error')) + + item = self.SyncRepresentationDetail( + file.get("_id"), + os.path.basename(file["path"]), + local_updated, + remote_updated, + local_site, + remote_site, + local_provider, + remote_provider, + local_progress, + remote_progress, + lib.pretty_size(file.get('size', 0)), + 1, + lib.STATUS[repre.get("status", -1)], + repre.get("tries"), + '\n'.join(errors), + file.get('path') + + ) + self._data.append(item) + self._rec_loaded += 1 + + def get_query(self, limit=0): + """ + Gets query that gets used when no extra sorting, filtering or + projecting is needed. + + Called for basic table view. 
+
+        Returns:
+            [(dict)] - list with a single dict, suitable for MongoDB's
+            aggregate function
+        """
+        if limit == 0:
+            limit = SyncRepresentationSummaryModel.PAGE_SIZE
+
+        aggr = [
+            {"$match": self.get_match_part()},
+            {"$unwind": "$files"},
+            {'$addFields': {
+                'order_remote': {
+                    '$filter': {'input': '$files.sites', 'as': 'p',
+                                'cond': {'$eq': ['$$p.name', self.remote_site]}
+                                }},
+                'order_local': {
+                    '$filter': {'input': '$files.sites', 'as': 'p',
+                                'cond': {'$eq': ['$$p.name', self.active_site]}
+                                }}
+            }},
+            {'$addFields': {
+                # prepare progress per file, presence of 'created_dt' denotes
+                # successfully finished load/download
+                'progress_remote': {'$first': {
+                    '$cond': [{'$size': "$order_remote.progress"},
+                              "$order_remote.progress",
+                              {'$cond': [
+                                  {'$size': "$order_remote.created_dt"},
+                                  [1],
+                                  [0]
+                              ]}
+                              ]}},
+                'progress_local': {'$first': {
+                    '$cond': [{'$size': "$order_local.progress"},
+                              "$order_local.progress",
+                              {'$cond': [
+                                  {'$size': "$order_local.created_dt"},
+                                  [1],
+                                  [0]
+                              ]}
+                              ]}},
+                # file might be successfully created or failed, not both
+                'updated_dt_remote': {'$first': {
+                    '$cond': [
+                        {'$size': "$order_remote.created_dt"},
+                        "$order_remote.created_dt",
+                        {
+                            '$cond': [
+                                {'$size': "$order_remote.last_failed_dt"},
+                                "$order_remote.last_failed_dt",
+                                []
+                            ]
+                        }
+                    ]
+                }},
+                'updated_dt_local': {'$first': {
+                    '$cond': [
+                        {'$size': "$order_local.created_dt"},
+                        "$order_local.created_dt",
+                        {
+                            '$cond': [
+                                {'$size': "$order_local.last_failed_dt"},
+                                "$order_local.last_failed_dt",
+                                []
+                            ]
+                        }
+                    ]
+                }},
+                'paused_remote': {
+                    '$cond': [{'$size': "$order_remote.paused"},
+                              1,
+                              0]},
+                'paused_local': {
+                    '$cond': [{'$size': "$order_local.paused"},
+                              1,
+                              0]},
+                'failed_remote': {
+                    '$cond': [{'$size': "$order_remote.last_failed_dt"},
+                              1,
+                              0]},
+                'failed_local': {
+                    '$cond': [{'$size': "$order_local.last_failed_dt"},
+                              1,
+                              0]},
+                'failed_remote_error': {'$first': {
+                    '$cond': [{'$size': "$order_remote.error"},
+                              "$order_remote.error",
+                              [""]]}},
+                'failed_local_error': {'$first': {
+                    '$cond': [{'$size': "$order_local.error"},
+                              "$order_local.error",
+                              [""]]}},
+                'tries': {'$first': {
+                    '$cond': [
+                        {'$size': "$order_local.tries"},
+                        "$order_local.tries",
+                        {'$cond': [
+                            {'$size': "$order_remote.tries"},
+                            "$order_remote.tries",
+                            []
+                        ]}
+                    ]}}
+            }},
+            {"$project": self.projection}
+        ]
+
+        if self.column_filtering:
+            aggr.append(
+                {"$match": self.column_filtering}
+            )
+
+        aggr.extend([
+            {"$sort": self.sort},
+            {
+                '$facet': {
+                    'paginatedResults': [{'$skip': self._rec_loaded},
+                                         {'$limit': limit}],
+                    'totalCount': [{'$count': 'count'}]
+                }
+            }
+        ])
+
+        return aggr
+
+    def get_match_part(self):
+        """
+        Returns the 'match' portion of the query; it is extended when
+        filtering by file name is active.
+
+        Returns:
+            (dict)
+        """
+        if not self._word_filter:
+            return {
+                "type": "representation",
+                "_id": self._id
+            }
+        else:
+            regex_str = '.*{}.*'.format(self._word_filter)
+            return {
+                "type": "representation",
+                "_id": self._id,
+                '$or': [{'files.path': {'$regex': regex_str, '$options': 'i'}}]
+            }
+
+    @property
+    def projection(self):
+        """
+        Projection part for aggregate query.
+
+        All fields with '1' will be returned, no others.
+ + Returns: + (dict) + """ + return { + "files": 1, + 'progress_remote': 1, + 'progress_local': 1, + 'updated_dt_remote': 1, + 'updated_dt_local': 1, + 'paused_remote': 1, + 'paused_local': 1, + 'failed_remote_error': 1, + 'failed_local_error': 1, + 'tries': 1, + 'status': { + '$switch': { + 'branches': [ + { + 'case': { + '$or': ['$paused_remote', '$paused_local']}, + 'then': 3 # Paused + }, + { + 'case': { + '$and': [{'$or': ['$failed_remote', + '$failed_local']}, + {'$eq': ['$tries', 3]}]}, + 'then': 2 # Failed (3 tries) + }, + { + 'case': { + '$or': [{'$eq': ['$progress_remote', 0]}, + {'$eq': ['$progress_local', 0]}]}, + 'then': 1 # Queued + }, + { + 'case': { + '$or': ['$failed_remote', '$failed_local']}, + 'then': 2 # Failed + }, + { + 'case': {'$or': [{'$and': [ + {'$gt': ['$progress_remote', 0]}, + {'$lt': ['$progress_remote', 1]} + ]}, + {'$and': [ + {'$gt': ['$progress_local', 0]}, + {'$lt': ['$progress_local', 1]} + ]} + ]}, + 'then': 0 # In Progress + }, + { + 'case': {'$and': [ + {'$eq': ['$progress_remote', 1]}, + {'$eq': ['$progress_local', 1]} + ]}, + 'then': 4 # Synced OK + }, + ], + 'default': -1 + } + }, + 'data.path': 1 + } diff --git a/openpype/modules/sync_server/tray/widgets.py b/openpype/modules/sync_server/tray/widgets.py new file mode 100644 index 0000000000..106fc4b8a8 --- /dev/null +++ b/openpype/modules/sync_server/tray/widgets.py @@ -0,0 +1,1103 @@ +import os +import subprocess +import sys +from functools import partial + +from Qt import QtWidgets, QtCore, QtGui +from Qt.QtCore import Qt + +from openpype.tools.settings import ( + ProjectListWidget, + style +) + +from openpype.api import get_local_site_id +from openpype.lib import PypeLogger + +from avalon.tools.delegates import pretty_timestamp +from avalon.vendor import qtawesome + +from openpype.modules.sync_server.tray.models import ( + SyncRepresentationSummaryModel, + SyncRepresentationDetailModel +) + +from openpype.modules.sync_server.tray import lib + +log = PypeLogger().get_logger("SyncServer") + + +class SyncProjectListWidget(ProjectListWidget): + """ + Lists all projects that are synchronized to choose from + """ + + def __init__(self, sync_server, parent): + super(SyncProjectListWidget, self).__init__(parent) + self.sync_server = sync_server + self.project_list.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) + self.project_list.customContextMenuRequested.connect( + self._on_context_menu) + self.project_name = None + self.local_site = None + self.icons = {} + + self.layout().setContentsMargins(0, 0, 0, 0) + + def validate_context_change(self): + return True + + def refresh(self): + model = self.project_list.model() + model.clear() + + project_name = None + for project_name in self.sync_server.sync_project_settings.\ + keys(): + if self.sync_server.is_paused() or \ + self.sync_server.is_project_paused(project_name): + icon = self._get_icon("paused") + else: + icon = self._get_icon("synced") + + model.appendRow(QtGui.QStandardItem(icon, project_name)) + + if len(self.sync_server.sync_project_settings.keys()) == 0: + model.appendRow(QtGui.QStandardItem(lib.DUMMY_PROJECT)) + + self.current_project = self.project_list.currentIndex().data( + QtCore.Qt.DisplayRole + ) + if not self.current_project: + self.current_project = self.project_list.model().item(0). 
\ + data(QtCore.Qt.DisplayRole) + + if project_name: + self.local_site = self.sync_server.get_active_site(project_name) + + def _get_icon(self, status): + if not self.icons.get(status): + resource_path = os.path.dirname(__file__) + resource_path = os.path.join(resource_path, "..", + "resources") + pix_url = "{}/{}.png".format(resource_path, status) + icon = QtGui.QIcon(pix_url) + self.icons[status] = icon + else: + icon = self.icons[status] + return icon + + def _on_context_menu(self, point): + point_index = self.project_list.indexAt(point) + if not point_index.isValid(): + return + + self.project_name = point_index.data(QtCore.Qt.DisplayRole) + + menu = QtWidgets.QMenu() + actions_mapping = {} + + if self.sync_server.is_project_paused(self.project_name): + action = QtWidgets.QAction("Unpause") + actions_mapping[action] = self._unpause + else: + action = QtWidgets.QAction("Pause") + actions_mapping[action] = self._pause + menu.addAction(action) + + if self.local_site == get_local_site_id(): + action = QtWidgets.QAction("Clear local project") + actions_mapping[action] = self._clear_project + menu.addAction(action) + + result = menu.exec_(QtGui.QCursor.pos()) + if result: + to_run = actions_mapping[result] + if to_run: + to_run() + + def _pause(self): + if self.project_name: + self.sync_server.pause_project(self.project_name) + self.project_name = None + self.refresh() + + def _unpause(self): + if self.project_name: + self.sync_server.unpause_project(self.project_name) + self.project_name = None + self.refresh() + + def _clear_project(self): + if self.project_name: + self.sync_server.clear_project(self.project_name, self.local_site) + self.project_name = None + self.refresh() + + +class _SyncRepresentationWidget(QtWidgets.QWidget): + """ + Summary dialog with list of representations that matches current + settings 'local_site' and 'remote_site'. + """ + active_changed = QtCore.Signal() # active index changed + message_generated = QtCore.Signal(str) + + def _selection_changed(self, _new_selected, _all_selected): + idxs = self.selection_model.selectedRows() + self._selected_ids = [] + + for index in idxs: + self._selected_ids.append(self.model.data(index, Qt.UserRole)) + + def _set_selection(self): + """ + Sets selection to 'self._selected_id' if exists. + + Keep selection during model refresh. + """ + existing_ids = [] + for selected_id in self._selected_ids: + index = self.model.get_index(selected_id) + if index and index.isValid(): + mode = QtCore.QItemSelectionModel.Select | \ + QtCore.QItemSelectionModel.Rows + self.selection_model.select(index, mode) + existing_ids.append(selected_id) + + self._selected_ids = existing_ids + + def _double_clicked(self, index): + """ + Opens representation dialog with all files after doubleclick + """ + _id = self.model.data(index, Qt.UserRole) + detail_window = SyncServerDetailWindow( + self.sync_server, _id, self.model.project) + detail_window.exec() + + def _on_context_menu(self, point): + """ + Shows menu with loader actions on Right-click. + + Supports multiple selects - adds all available actions, each + action handles if it appropriate for item itself, if not it skips. 
+ """ + is_multi = len(self._selected_ids) > 1 + point_index = self.table_view.indexAt(point) + if not point_index.isValid() and not is_multi: + return + + if is_multi: + index = self.model.get_index(self._selected_ids[0]) + item = self.model.data(index, lib.FullItemRole) + else: + item = self.model.data(point_index, lib.FullItemRole) + + action_kwarg_map, actions_mapping, menu = self._prepare_menu(item, + is_multi) + + result = menu.exec_(QtGui.QCursor.pos()) + if result: + to_run = actions_mapping[result] + to_run_kwargs = action_kwarg_map.get(result, {}) + if to_run: + to_run(**to_run_kwargs) + + self.model.refresh() + + def _prepare_menu(self, item, is_multi): + menu = QtWidgets.QMenu() + + actions_mapping = {} + action_kwarg_map = {} + + active_site = self.model.active_site + remote_site = self.model.remote_site + + local_progress = item.local_progress + remote_progress = item.remote_progress + + project = self.model.project + + for site, progress in {active_site: local_progress, + remote_site: remote_progress}.items(): + provider = self.sync_server.get_provider_for_site(project, site) + if provider == 'local_drive': + if 'studio' in site: + txt = " studio version" + else: + txt = " local version" + action = QtWidgets.QAction("Open in explorer" + txt) + if progress == 1.0 or is_multi: + actions_mapping[action] = self._open_in_explorer + action_kwarg_map[action] = \ + self._get_action_kwargs(site) + menu.addAction(action) + + if remote_progress == 1.0 or is_multi: + action = QtWidgets.QAction("Re-sync Active site") + action_kwarg_map[action] = self._get_action_kwargs(active_site) + actions_mapping[action] = self._reset_site + menu.addAction(action) + + if local_progress == 1.0 or is_multi: + action = QtWidgets.QAction("Re-sync Remote site") + action_kwarg_map[action] = self._get_action_kwargs(remote_site) + actions_mapping[action] = self._reset_site + menu.addAction(action) + + if active_site == get_local_site_id(): + action = QtWidgets.QAction("Completely remove from local") + action_kwarg_map[action] = self._get_action_kwargs(active_site) + actions_mapping[action] = self._remove_site + menu.addAction(action) + + # # temp for testing only !!! 
+ # action = QtWidgets.QAction("Download") + # action_kwarg_map[action] = self._get_action_kwargs(active_site) + # actions_mapping[action] = self._add_site + # menu.addAction(action) + + if not actions_mapping: + action = QtWidgets.QAction("< No action >") + actions_mapping[action] = None + menu.addAction(action) + + return action_kwarg_map, actions_mapping, menu + + def _pause(self, selected_ids=None): + log.debug("Pause {}".format(selected_ids)) + for representation_id in selected_ids: + item = lib.get_item_by_id(self.model, representation_id) + if item.status not in [lib.STATUS[0], lib.STATUS[1]]: + continue + for site_name in [self.model.active_site, self.model.remote_site]: + check_progress = self._get_progress(item, site_name) + if check_progress < 1: + self.sync_server.pause_representation(self.model.project, + representation_id, + site_name) + + self.message_generated.emit("Paused {}".format(representation_id)) + + def _unpause(self, selected_ids=None): + log.debug("UnPause {}".format(selected_ids)) + for representation_id in selected_ids: + item = lib.get_item_by_id(self.model, representation_id) + if item.status not in lib.STATUS[3]: + continue + for site_name in [self.model.active_site, self.model.remote_site]: + check_progress = self._get_progress(item, site_name) + if check_progress < 1: + self.sync_server.unpause_representation( + self.model.project, + representation_id, + site_name) + + self.message_generated.emit("Unpause {}".format(representation_id)) + + # temporary here for testing, will be removed TODO + def _add_site(self, selected_ids=None, site_name=None): + log.debug("Add site {}:{}".format(selected_ids, site_name)) + for representation_id in selected_ids: + item = lib.get_item_by_id(self.model, representation_id) + if item.local_site == site_name or item.remote_site == site_name: + # site already exists skip + continue + + try: + self.sync_server.add_site( + self.model.project, + representation_id, + site_name) + self.message_generated.emit( + "Site {} added for {}".format(site_name, + representation_id)) + except ValueError as exp: + self.message_generated.emit("Error {}".format(str(exp))) + self.sync_server.reset_timer() + + def _remove_site(self, selected_ids=None, site_name=None): + """ + Removes site record AND files. + + This is ONLY for representations stored on local site, which + cannot be same as SyncServer.DEFAULT_SITE. + + This could only happen when artist work on local machine, not + connected to studio mounted drives. 
+ """ + log.debug("Remove site {}:{}".format(selected_ids, site_name)) + for representation_id in selected_ids: + log.info("Removing {}".format(representation_id)) + try: + self.sync_server.remove_site( + self.model.project, + representation_id, + site_name, + True) + self.message_generated.emit( + "Site {} removed".format(site_name)) + except ValueError as exp: + self.message_generated.emit("Error {}".format(str(exp))) + + self.model.refresh( + load_records=self.model._rec_loaded) + self.sync_server.reset_timer() + + def _reset_site(self, selected_ids=None, site_name=None): + """ + Removes errors or success metadata for particular file >> forces + redo of upload/download + """ + log.debug("Reset site {}:{}".format(selected_ids, site_name)) + for representation_id in selected_ids: + item = lib.get_item_by_id(self.model, representation_id) + check_progress = self._get_progress(item, site_name, True) + + # do not reset if opposite side is not fully there + if check_progress != 1: + log.debug("Not fully available {} on other side, skipping". + format(check_progress)) + continue + + self.sync_server.reset_provider_for_file( + self.model.project, + representation_id, + site_name=site_name, + force=True) + + self.model.refresh( + load_records=self.model._rec_loaded) + self.sync_server.reset_timer() + + def _open_in_explorer(self, selected_ids=None, site_name=None): + log.debug("Open in Explorer {}:{}".format(selected_ids, site_name)) + for selected_id in selected_ids: + item = lib.get_item_by_id(self.model, selected_id) + if not item: + return + + fpath = item.path + project = self.model.project + fpath = self.sync_server.get_local_file_path(project, + site_name, + fpath) + + fpath = os.path.normpath(os.path.dirname(fpath)) + if os.path.isdir(fpath): + if 'win' in sys.platform: # windows + subprocess.Popen('explorer "%s"' % fpath) + elif sys.platform == 'darwin': # macOS + subprocess.Popen(['open', fpath]) + else: # linux + try: + subprocess.Popen(['xdg-open', fpath]) + except OSError: + raise OSError('unsupported xdg-open call??') + + def _get_progress(self, item, site_name, opposite=False): + """Returns progress value according to site (side)""" + progress = {'local': item.local_progress, + 'remote': item.remote_progress} + side = 'remote' + if site_name == self.model.active_site: + side = 'local' + if opposite: + side = 'remote' if side == 'local' else 'local' + + return progress[side] + + def _get_action_kwargs(self, site_name): + """Default format of kwargs for action""" + return {"selected_ids": self._selected_ids, "site_name": site_name} + + def _save_scrollbar(self): + self._scrollbar_pos = self.table_view.verticalScrollBar().value() + + def _set_scrollbar(self): + if self._scrollbar_pos: + self.table_view.verticalScrollBar().setValue(self._scrollbar_pos) + + +class SyncRepresentationSummaryWidget(_SyncRepresentationWidget): + + default_widths = ( + ("asset", 190), + ("subset", 170), + ("version", 60), + ("representation", 145), + ("local_site", 160), + ("remote_site", 160), + ("files_count", 50), + ("files_size", 60), + ("priority", 70), + ("status", 110) + ) + + def __init__(self, sync_server, project=None, parent=None): + super(SyncRepresentationSummaryWidget, self).__init__(parent) + + self.sync_server = sync_server + + self._selected_ids = [] # keep last selected _id + + txt_filter = QtWidgets.QLineEdit() + txt_filter.setPlaceholderText("Quick filter representations..") + txt_filter.setClearButtonEnabled(True) + txt_filter.addAction( + qtawesome.icon("fa.filter", color="gray"), + 
QtWidgets.QLineEdit.LeadingPosition) + self.txt_filter = txt_filter + + self._scrollbar_pos = None + + top_bar_layout = QtWidgets.QHBoxLayout() + top_bar_layout.addWidget(self.txt_filter) + + table_view = QtWidgets.QTableView() + headers = [item[0] for item in self.default_widths] + + model = SyncRepresentationSummaryModel(sync_server, headers, project) + table_view.setModel(model) + table_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) + table_view.setSelectionMode( + QtWidgets.QAbstractItemView.ExtendedSelection) + table_view.setSelectionBehavior( + QtWidgets.QAbstractItemView.SelectRows) + table_view.horizontalHeader().setSortIndicator( + -1, Qt.AscendingOrder) + table_view.setAlternatingRowColors(True) + table_view.verticalHeader().hide() + + column = table_view.model().get_header_index("local_site") + delegate = ImageDelegate(self) + table_view.setItemDelegateForColumn(column, delegate) + + column = table_view.model().get_header_index("remote_site") + delegate = ImageDelegate(self) + table_view.setItemDelegateForColumn(column, delegate) + + layout = QtWidgets.QVBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addLayout(top_bar_layout) + layout.addWidget(table_view) + + self.table_view = table_view + self.model = model + + horizontal_header = HorizontalHeader(self) + + table_view.setHorizontalHeader(horizontal_header) + table_view.setSortingEnabled(True) + + for column_name, width in self.default_widths: + idx = model.get_header_index(column_name) + table_view.setColumnWidth(idx, width) + + table_view.doubleClicked.connect(self._double_clicked) + self.txt_filter.textChanged.connect(lambda: model.set_word_filter( + self.txt_filter.text())) + table_view.customContextMenuRequested.connect(self._on_context_menu) + + model.refresh_started.connect(self._save_scrollbar) + model.refresh_finished.connect(self._set_scrollbar) + model.modelReset.connect(self._set_selection) + + self.selection_model = self.table_view.selectionModel() + self.selection_model.selectionChanged.connect(self._selection_changed) + + def _prepare_menu(self, item, is_multi): + action_kwarg_map, actions_mapping, menu = \ + super()._prepare_menu(item, is_multi) + + if item.status in [lib.STATUS[0], lib.STATUS[1]] or is_multi: + action = QtWidgets.QAction("Pause in queue") + actions_mapping[action] = self._pause + # pause handles which site_name it will pause itself + action_kwarg_map[action] = {"selected_ids": self._selected_ids} + menu.addAction(action) + + if item.status == lib.STATUS[3] or is_multi: + action = QtWidgets.QAction("Unpause in queue") + actions_mapping[action] = self._unpause + action_kwarg_map[action] = {"selected_ids": self._selected_ids} + menu.addAction(action) + + return action_kwarg_map, actions_mapping, menu + + +class SyncServerDetailWindow(QtWidgets.QDialog): + """Wrapper window for SyncRepresentationDetailWidget + + Creates standalone window with list of files for selected repre_id. + """ + def __init__(self, sync_server, _id, project, parent=None): + log.debug( + "!!! 
SyncServerDetailWindow _id:: {}".format(_id)) + super(SyncServerDetailWindow, self).__init__(parent) + self.setWindowFlags(QtCore.Qt.Window) + self.setFocusPolicy(QtCore.Qt.StrongFocus) + + self.setStyleSheet(style.load_stylesheet()) + self.setWindowIcon(QtGui.QIcon(style.app_icon_path())) + self.resize(1000, 400) + + body = QtWidgets.QWidget() + footer = QtWidgets.QWidget() + footer.setFixedHeight(20) + + container = SyncRepresentationDetailWidget(sync_server, _id, project, + parent=self) + body_layout = QtWidgets.QHBoxLayout(body) + body_layout.addWidget(container) + body_layout.setContentsMargins(0, 0, 0, 0) + + self.message = QtWidgets.QLabel() + self.message.hide() + + footer_layout = QtWidgets.QVBoxLayout(footer) + footer_layout.addWidget(self.message) + footer_layout.setContentsMargins(0, 0, 0, 0) + + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(body) + layout.addWidget(footer) + + self.setLayout(body_layout) + self.setWindowTitle("Sync Representation Detail") + + +class SyncRepresentationDetailWidget(_SyncRepresentationWidget): + """ + Widget to display list of synchronizable files for single repre. + + Args: + _id (str): representation _id + project (str): name of project with repre + parent (QDialog): SyncServerDetailWindow + """ + active_changed = QtCore.Signal() # active index changed + + default_widths = ( + ("file", 290), + ("local_site", 185), + ("remote_site", 185), + ("size", 60), + ("priority", 60), + ("status", 110) + ) + + def __init__(self, sync_server, _id=None, project=None, parent=None): + super(SyncRepresentationDetailWidget, self).__init__(parent) + + log.debug("Representation_id:{}".format(_id)) + self.project = project + + self.sync_server = sync_server + + self.representation_id = _id + self._selected_ids = [] + + self.txt_filter = QtWidgets.QLineEdit() + self.txt_filter.setPlaceholderText("Quick filter representation..") + self.txt_filter.setClearButtonEnabled(True) + self.txt_filter.addAction(qtawesome.icon("fa.filter", color="gray"), + QtWidgets.QLineEdit.LeadingPosition) + + self._scrollbar_pos = None + + top_bar_layout = QtWidgets.QHBoxLayout() + top_bar_layout.addWidget(self.txt_filter) + + table_view = QtWidgets.QTableView() + headers = [item[0] for item in self.default_widths] + + model = SyncRepresentationDetailModel(sync_server, headers, _id, + project) + table_view.setModel(model) + table_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) + table_view.setSelectionMode( + QtWidgets.QAbstractItemView.ExtendedSelection) + table_view.setSelectionBehavior( + QtWidgets.QTableView.SelectRows) + table_view.horizontalHeader().setSortIndicator(-1, Qt.AscendingOrder) + table_view.horizontalHeader().setSortIndicatorShown(True) + table_view.setAlternatingRowColors(True) + table_view.verticalHeader().hide() + + column = model.get_header_index("local_site") + delegate = ImageDelegate(self) + table_view.setItemDelegateForColumn(column, delegate) + + column = model.get_header_index("remote_site") + delegate = ImageDelegate(self) + table_view.setItemDelegateForColumn(column, delegate) + + layout = QtWidgets.QVBoxLayout(self) + layout.setContentsMargins(0, 0, 0, 0) + layout.addLayout(top_bar_layout) + layout.addWidget(table_view) + + self.model = model + + self.selection_model = table_view.selectionModel() + self.selection_model.selectionChanged.connect(self._selection_changed) + + horizontal_header = HorizontalHeader(self) + + table_view.setHorizontalHeader(horizontal_header) + table_view.setSortingEnabled(True) + + for column_name, width in 
self.default_widths: + idx = model.get_header_index(column_name) + table_view.setColumnWidth(idx, width) + + self.table_view = table_view + + self.txt_filter.textChanged.connect(lambda: model.set_word_filter( + self.txt_filter.text())) + table_view.customContextMenuRequested.connect(self._on_context_menu) + + model.refresh_started.connect(self._save_scrollbar) + model.refresh_finished.connect(self._set_scrollbar) + model.modelReset.connect(self._set_selection) + + def _show_detail(self, selected_ids=None): + """ + Shows windows with error message for failed sync of a file. + """ + detail_window = SyncRepresentationErrorWindow(self.model, selected_ids) + + detail_window.exec() + + def _prepare_menu(self, item, is_multi): + """Adds view (and model) dependent actions to default ones""" + action_kwarg_map, actions_mapping, menu = \ + super()._prepare_menu(item, is_multi) + + if item.status == lib.STATUS[2] or is_multi: + action = QtWidgets.QAction("Open error detail") + actions_mapping[action] = self._show_detail + action_kwarg_map[action] = {"selected_ids": self._selected_ids} + + menu.addAction(action) + + return action_kwarg_map, actions_mapping, menu + + def _reset_site(self, selected_ids=None, site_name=None): + """ + Removes errors or success metadata for particular file >> forces + redo of upload/download + """ + for file_id in selected_ids: + item = lib.get_item_by_id(self.model, file_id) + check_progress = self._get_progress(item, site_name, True) + + # do not reset if opposite side is not fully there + if check_progress != 1: + log.debug("Not fully available {} on other side, skipping". + format(check_progress)) + continue + + self.sync_server.reset_provider_for_file( + self.model.project, + self.representation_id, + site_name=site_name, + file_id=file_id, + force=True) + self.model.refresh( + load_records=self.model._rec_loaded) + + +class SyncRepresentationErrorWindow(QtWidgets.QDialog): + """Wrapper window to show errors during sync on file(s)""" + def __init__(self, model, selected_ids, parent=None): + super(SyncRepresentationErrorWindow, self).__init__(parent) + self.setWindowFlags(QtCore.Qt.Window) + self.setFocusPolicy(QtCore.Qt.StrongFocus) + + self.setStyleSheet(style.load_stylesheet()) + self.setWindowIcon(QtGui.QIcon(style.app_icon_path())) + self.resize(900, 150) + + body = QtWidgets.QWidget() + + container = SyncRepresentationErrorWidget(model, + selected_ids, + parent=self) + body_layout = QtWidgets.QHBoxLayout(body) + body_layout.addWidget(container) + body_layout.setContentsMargins(0, 0, 0, 0) + + message = QtWidgets.QLabel() + message.hide() + + layout = QtWidgets.QVBoxLayout(self) + layout.addWidget(body) + + self.setLayout(body_layout) + self.setWindowTitle("Sync Representation Error Detail") + + +class SyncRepresentationErrorWidget(QtWidgets.QWidget): + """ + Dialog to show when sync error happened, prints formatted error message + """ + def __init__(self, model, selected_ids, parent=None): + super(SyncRepresentationErrorWidget, self).__init__(parent) + + layout = QtWidgets.QVBoxLayout(self) + + no_errors = True + for file_id in selected_ids: + item = lib.get_item_by_id(model, file_id) + if not item.created_dt or not item.sync_dt or not item.error: + continue + + no_errors = False + dt = max(item.created_dt, item.sync_dt) + + txts = [] + txts.append("{}: {}
".format("Last update date", + pretty_timestamp(dt))) + txts.append("{}: {}
".format("Retries", + str(item.tries))) + txts.append("{}: {}
".format("Error message", + item.error)) + + text_area = QtWidgets.QTextEdit("\n\n".join(txts)) + text_area.setReadOnly(True) + layout.addWidget(text_area) + + if no_errors: + text_area = QtWidgets.QTextEdit() + text_area.setText("

No errors located

") + text_area.setReadOnly(True) + layout.addWidget(text_area) + + +class ImageDelegate(QtWidgets.QStyledItemDelegate): + """ + Prints icon of site and progress of synchronization + """ + + def __init__(self, parent=None): + super(ImageDelegate, self).__init__(parent) + self.icons = {} + + def paint(self, painter, option, index): + super(ImageDelegate, self).paint(painter, option, index) + option = QtWidgets.QStyleOptionViewItem(option) + option.showDecorationSelected = True + + provider = index.data(lib.ProviderRole) + value = index.data(lib.ProgressRole) + date_value = index.data(lib.DateRole) + is_failed = index.data(lib.FailedRole) + + if not self.icons.get(provider): + resource_path = os.path.dirname(__file__) + resource_path = os.path.join(resource_path, "..", + "providers", "resources") + pix_url = "{}/{}.png".format(resource_path, provider) + pixmap = QtGui.QPixmap(pix_url) + self.icons[provider] = pixmap + else: + pixmap = self.icons[provider] + + padding = 10 + point = QtCore.QPoint(option.rect.x() + padding, + option.rect.y() + + (option.rect.height() - pixmap.height()) / 2) + painter.drawPixmap(point, pixmap) + + overlay_rect = option.rect.translated(0, 0) + overlay_rect.setHeight(overlay_rect.height() * (1.0 - float(value))) + painter.fillRect(overlay_rect, + QtGui.QBrush(QtGui.QColor(0, 0, 0, 100))) + text_rect = option.rect.translated(10, 0) + painter.drawText(text_rect, + QtCore.Qt.AlignCenter, + date_value) + + if is_failed: + overlay_rect = option.rect.translated(0, 0) + painter.fillRect(overlay_rect, + QtGui.QBrush(QtGui.QColor(255, 0, 0, 35))) + + +class TransparentWidget(QtWidgets.QWidget): + """Used for header cell for resizing to work properly""" + clicked = QtCore.Signal(str) + + def __init__(self, column_name, *args, **kwargs): + super(TransparentWidget, self).__init__(*args, **kwargs) + self.column_name = column_name + # self.setStyleSheet("background: red;") + + def mouseReleaseEvent(self, event): + if event.button() == QtCore.Qt.LeftButton: + self.clicked.emit(self.column_name) + + super(TransparentWidget, self).mouseReleaseEvent(event) + + +class HorizontalHeader(QtWidgets.QHeaderView): + """Reiplemented QHeaderView to contain clickable changeable button""" + def __init__(self, parent=None): + super(HorizontalHeader, self).__init__(QtCore.Qt.Horizontal, parent) + self._parent = parent + self.checked_values = {} + + self.setModel(self._parent.model) + + self.setSectionsClickable(True) + + self.menu_items_dict = {} + self.menu = None + self.header_cells = [] + self.filter_buttons = {} + + self.filter_icon = qtawesome.icon("fa.filter", color="gray") + self.filter_set_icon = qtawesome.icon("fa.filter", color="white") + + self.init_layout() + + self._resetting = False + + @property + def model(self): + """Keep model synchronized with parent widget""" + return self._parent.model + + def init_layout(self): + """Initial preparation of header's content""" + for column_idx in range(self.model.columnCount()): + column_name, column_label = self.model.get_column(column_idx) + filter_rec = self.model.get_filters().get(column_name) + if not filter_rec: + continue + + icon = self.filter_icon + button = QtWidgets.QPushButton(icon, "", self) + + button.setFixedSize(24, 24) + button.setStyleSheet( + "QPushButton::menu-indicator{width:0px;}" + "QPushButton{border: none;background: transparent;}") + button.clicked.connect(partial(self._get_menu, + column_name, column_idx)) + button.setFlat(True) + self.filter_buttons[column_name] = button + + def showEvent(self, event): + """Paint 
header""" + super(HorizontalHeader, self).showEvent(event) + + for i in range(len(self.header_cells)): + cell_content = self.header_cells[i] + cell_content.setGeometry(self.sectionViewportPosition(i), 0, + self.sectionSize(i) - 1, self.height()) + + cell_content.show() + + def _set_filter_icon(self, column_name): + """Set different states of button depending on its engagement""" + button = self.filter_buttons.get(column_name) + if button: + if self.checked_values.get(column_name): + button.setIcon(self.filter_set_icon) + else: + button.setIcon(self.filter_icon) + + def _reset_filter(self, column_name): + """ + Remove whole column from filter >> not in $match at all (faster) + """ + self._resetting = True # mark changes to consume them + if self.checked_values.get(column_name) is not None: + self.checked_values.pop(column_name) + self._set_filter_icon(column_name) + self._filter_and_refresh_model_and_menu(column_name, True, True) + self._resetting = False + + def _apply_filter(self, column_name, values, state): + """ + Sets 'values' to specific 'state' (checked/unchecked), + sends to model. + """ + if self._resetting: # event triggered by _resetting, skip it + return + + self._update_checked_values(column_name, values, state) + self._set_filter_icon(column_name) + self._filter_and_refresh_model_and_menu(column_name, True, False) + + def _apply_text_filter(self, column_name, items, line_edit): + """ + Resets all checkboxes, prefers inserted text. + """ + le_text = line_edit.text() + self._update_checked_values(column_name, items, 0) # reset other + if self.checked_values.get(column_name) is not None or \ + le_text == '': + self.checked_values.pop(column_name) # reset during typing + + if le_text: + self._update_checked_values(column_name, {le_text: le_text}, 2) + self._set_filter_icon(column_name) + self._filter_and_refresh_model_and_menu(column_name, True, True) + + def _filter_and_refresh_model_and_menu(self, column_name, + model=True, menu=True): + """ + Refresh model and its content and possibly menu for big changes. 
+ """ + if model: + self.model.set_column_filtering(self.checked_values) + self.model.refresh() + if menu: + self._menu_refresh(column_name) + + def _get_menu(self, column_name, index): + """Prepares content of menu for 'column_name'""" + menu = QtWidgets.QMenu(self) + filter_rec = self.model.get_filters()[column_name] + self.menu_items_dict[column_name] = filter_rec.values() + + # text filtering only if labels same as values, not if codes are used + if 'text' in filter_rec.search_variants(): + line_edit = QtWidgets.QLineEdit(menu) + line_edit.setClearButtonEnabled(True) + line_edit.addAction(self.filter_icon, + QtWidgets.QLineEdit.LeadingPosition) + + line_edit.setFixedHeight(line_edit.height()) + txt = "" + if self.checked_values.get(column_name): + txt = list(self.checked_values.get(column_name).keys())[0] + line_edit.setText(txt) + + action_le = QtWidgets.QWidgetAction(menu) + action_le.setDefaultWidget(line_edit) + line_edit.textChanged.connect( + partial(self._apply_text_filter, column_name, + filter_rec.values(), line_edit)) + menu.addAction(action_le) + menu.addSeparator() + + if 'checkbox' in filter_rec.search_variants(): + action_all = QtWidgets.QAction("All", self) + action_all.triggered.connect(partial(self._reset_filter, + column_name)) + menu.addAction(action_all) + + action_none = QtWidgets.QAction("Unselect all", self) + state_unchecked = 0 + action_none.triggered.connect(partial(self._apply_filter, + column_name, + filter_rec.values(), + state_unchecked)) + menu.addAction(action_none) + menu.addSeparator() + + # nothing explicitly >> ALL implicitly >> first time + if self.checked_values.get(column_name) is None: + checked_keys = self.menu_items_dict[column_name].keys() + else: + checked_keys = self.checked_values[column_name] + + for value, label in self.menu_items_dict[column_name].items(): + checkbox = QtWidgets.QCheckBox(str(label), menu) + + # temp + checkbox.setStyleSheet("QCheckBox{spacing: 5px;" + "padding:5px 5px 5px 5px;}") + if value in checked_keys: + checkbox.setChecked(True) + + action = QtWidgets.QWidgetAction(menu) + action.setDefaultWidget(checkbox) + + checkbox.stateChanged.connect(partial(self._apply_filter, + column_name, {value: label})) + menu.addAction(action) + + self.menu = menu + + self._show_menu(index, menu) + + def _show_menu(self, index, menu): + """Shows 'menu' under header column of 'index'""" + global_pos_point = self.mapToGlobal( + QtCore.QPoint(self.sectionViewportPosition(index), 0)) + menu.setMinimumWidth(self.sectionSize(index)) + menu.setMinimumHeight(self.height()) + menu.exec_(QtCore.QPoint(global_pos_point.x(), + global_pos_point.y() + self.height())) + + def _menu_refresh(self, column_name): + """ + Reset boxes after big change - word filtering or reset + """ + for action in self.menu.actions(): + if not isinstance(action, QtWidgets.QWidgetAction): + continue + + widget = action.defaultWidget() + if not isinstance(widget, QtWidgets.QCheckBox): + continue + + if not self.checked_values.get(column_name) or \ + widget.text() in self.checked_values[column_name].values(): + widget.setChecked(True) + else: + widget.setChecked(False) + + def _update_checked_values(self, column_name, values, state): + """ + Modify dictionary of set values in columns for filtering. 
+ + Modifies 'self.checked_values' + """ + copy_menu_items = dict(self.menu_items_dict[column_name]) + checked = self.checked_values.get(column_name, copy_menu_items) + set_items = dict(values.items()) # prevent dict change during loop + for value, label in set_items.items(): + if state == 2 and label: # checked + checked[value] = label + elif state == 0 and checked.get(value): + checked.pop(value) + + self.checked_values[column_name] = checked + + def paintEvent(self, event): + self._fix_size() + super(HorizontalHeader, self).paintEvent(event) + + def _fix_size(self): + for column_idx in range(self.model.columnCount()): + vis_index = self.visualIndex(column_idx) + index = self.logicalIndex(vis_index) + section_width = self.sectionSize(index) + + column_name = self.model.headerData(column_idx, + QtCore.Qt.Horizontal, + lib.HeaderNameRole) + button = self.filter_buttons.get(column_name) + if not button: + continue + + pos_x = self.sectionViewportPosition( + index) + section_width - self.height() + + pos_y = 0 + if button.height() < self.height(): + pos_y = int((self.height() - button.height()) / 2) + button.setGeometry( + pos_x, + pos_y, + self.height(), + self.height()) diff --git a/openpype/modules/sync_server/utils.py b/openpype/modules/sync_server/utils.py index 0762766783..fa6e63b029 100644 --- a/openpype/modules/sync_server/utils.py +++ b/openpype/modules/sync_server/utils.py @@ -1,8 +1,19 @@ import time -from openpype.api import Logger +from openpype.api import Logger log = Logger().get_logger("SyncServer") +class ResumableError(Exception): + """Error which could be temporary, skip current loop, try next time""" + pass + + +class SyncStatus: + DO_NOTHING = 0 + DO_UPLOAD = 1 + DO_DOWNLOAD = 2 + + def time_function(method): """ Decorator to print how much time function took. For debugging. 
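Note: the `SyncStatus` constants and `ResumableError` added to utils.py above describe the contract the module's sync loop works against. Below is a minimal sketch of that contract, assuming a hypothetical `handler` object whose `check_status`, `upload` and `download` methods stand in for the module's real provider logic; it is illustrative only, not the module's actual API.

```python
from openpype.modules.sync_server.utils import ResumableError, SyncStatus


def process_file(file_info, handler):
    # Decide direction first; DO_NOTHING means both sites are in sync.
    status = handler.check_status(file_info)  # hypothetical helper
    try:
        if status == SyncStatus.DO_UPLOAD:
            handler.upload(file_info)
        elif status == SyncStatus.DO_DOWNLOAD:
            handler.download(file_info)
    except ResumableError:
        # Temporary failure (e.g. a dropped connection): skip this loop,
        # leave the record untouched so it is retried on the next tick.
        pass
```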
diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py index a8ea5799e6..92edd5aeaa 100644 --- a/openpype/modules/timers_manager/timers_manager.py +++ b/openpype/modules/timers_manager/timers_manager.py @@ -45,11 +45,13 @@ class TimersManager(PypeModule, ITrayService, IIdleManager, IWebServerRoutes): timers_settings = modules_settings[self.name] self.enabled = timers_settings["enabled"] + auto_stop = timers_settings["auto_stop"] # When timer will stop if idle manager is running (minutes) full_time = int(timers_settings["full_time"] * 60) # How many minutes before the timer is stopped will popup the message message_time = int(timers_settings["message_time"] * 60) + self.auto_stop = auto_stop self.time_show_message = full_time - message_time self.time_stop_timer = full_time @@ -160,6 +162,9 @@ class TimersManager(PypeModule, ITrayService, IIdleManager, IWebServerRoutes): def callbacks_by_idle_time(self): """Implementation of IIdleManager interface.""" # Time when message is shown + if not self.auto_stop: + return {} + callbacks = collections.defaultdict(list) callbacks[self.time_show_message].append(lambda: self.time_callback(0)) diff --git a/openpype/modules/user/__init__.py b/openpype/modules/user/__init__.py deleted file mode 100644 index a97ac0eef6..0000000000 --- a/openpype/modules/user/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from .user_module import ( - UserModule, - IUserModule -) - - -__all__ = ( - "UserModule", - "IUserModule" -) diff --git a/openpype/modules/user/rest_api.py b/openpype/modules/user/rest_api.py deleted file mode 100644 index 566425a19b..0000000000 --- a/openpype/modules/user/rest_api.py +++ /dev/null @@ -1,35 +0,0 @@ -import json -from aiohttp.web_response import Response - - -class UserModuleRestApi: - def __init__(self, user_module, server_manager): - self.module = user_module - self.server_manager = server_manager - - self.prefix = "/user" - - self.register() - - def register(self): - self.server_manager.add_route( - "GET", - self.prefix + "/username", - self.get_username - ) - self.server_manager.add_route( - "GET", - self.prefix + "/show_widget", - self.show_user_widget - ) - - async def get_username(self, request): - return Response( - status=200, - body=json.dumps(self.module.cred, indent=4), - content_type="application/json" - ) - - async def show_user_widget(self, request): - self.module.action_show_widget.trigger() - return Response(status=200) diff --git a/openpype/modules/user/user_module.py b/openpype/modules/user/user_module.py deleted file mode 100644 index 7d257f1781..0000000000 --- a/openpype/modules/user/user_module.py +++ /dev/null @@ -1,169 +0,0 @@ -import os -import json -import getpass - -from abc import ABCMeta, abstractmethod - -import six -import appdirs - -from .. 
import ( - PypeModule, - ITrayModule, - IWebServerRoutes -) - - -@six.add_metaclass(ABCMeta) -class IUserModule: - """Interface for other modules to use user change callbacks.""" - - @abstractmethod - def on_pype_user_change(self, username): - """What should happen on Pype user change.""" - pass - - -class UserModule(PypeModule, ITrayModule, IWebServerRoutes): - cred_folder_path = os.path.normpath( - appdirs.user_data_dir('pype-app', 'pype') - ) - cred_filename = 'user_info.json' - env_name = "OPENPYPE_USERNAME" - - name = "user" - - def initialize(self, modules_settings): - user_settings = modules_settings[self.name] - self.enabled = user_settings["enabled"] - - self.callbacks_on_user_change = [] - self.cred = {} - self.cred_path = os.path.normpath(os.path.join( - self.cred_folder_path, self.cred_filename - )) - - # Tray attributes - self.widget_login = None - self.action_show_widget = None - - self.rest_api_obj = None - - def tray_init(self): - from .widget_user import UserWidget - self.widget_login = UserWidget(self) - - self.load_credentials() - - def register_callback_on_user_change(self, callback): - self.callbacks_on_user_change.append(callback) - - def tray_start(self): - """Store credentials to env and preset them to widget""" - username = "" - if self.cred: - username = self.cred.get("username") or "" - - os.environ[self.env_name] = username - self.widget_login.set_user(username) - - def tray_exit(self): - """Nothing special for User.""" - return - - def get_user(self): - return self.cred.get("username") or getpass.getuser() - - def webserver_initialization(self, server_manager): - """Implementation of IWebServerRoutes interface.""" - from .rest_api import UserModuleRestApi - - self.rest_api_obj = UserModuleRestApi(self, server_manager) - - def connect_with_modules(self, enabled_modules): - for module in enabled_modules: - if isinstance(module, IUserModule): - self.callbacks_on_user_change.append( - module.on_pype_user_change - ) - - # Definition of Tray menu - def tray_menu(self, parent_menu): - from Qt import QtWidgets - """Add menu or action to Tray(or parent)'s menu""" - action = QtWidgets.QAction("Username", parent_menu) - action.triggered.connect(self.show_widget) - parent_menu.addAction(action) - parent_menu.addSeparator() - - self.action_show_widget = action - - def load_credentials(self): - """Get credentials from JSON file """ - credentials = {} - try: - file = open(self.cred_path, "r") - credentials = json.load(file) - file.close() - - self.cred = credentials - username = credentials.get("username") - if username: - self.log.debug("Loaded Username \"{}\"".format(username)) - else: - self.log.debug("Pype Username is not set") - - return credentials - - except FileNotFoundError: - return self.save_credentials(getpass.getuser()) - - except json.decoder.JSONDecodeError: - self.log.warning(( - "File where users credentials should be stored" - " has invalid json format. Loading system username." 
- )) - return self.save_credentials(getpass.getuser()) - - def change_credentials(self, username): - self.save_credentials(username) - for callback in self.callbacks_on_user_change: - try: - callback(username) - except Exception: - self.log.warning( - "Failed to execute callback \"{}\".".format( - str(callback) - ), - exc_info=True - ) - - def save_credentials(self, username): - """Save credentials to JSON file, env and widget""" - if username is None: - username = "" - - username = str(username).strip() - - self.cred = {"username": username} - os.environ[self.env_name] = username - if self.widget_login: - self.widget_login.set_user(username) - try: - file = open(self.cred_path, "w") - file.write(json.dumps(self.cred)) - file.close() - self.log.debug("Username \"{}\" stored".format(username)) - except Exception: - self.log.error( - "Could not store username to file \"{}\"".format( - self.cred_path - ), - exc_info=True - ) - - return self.cred - - def show_widget(self): - """Show dialog to enter credentials""" - self.widget_login.show() diff --git a/openpype/modules/user/widget_user.py b/openpype/modules/user/widget_user.py deleted file mode 100644 index f8ecadf56b..0000000000 --- a/openpype/modules/user/widget_user.py +++ /dev/null @@ -1,88 +0,0 @@ -from Qt import QtCore, QtGui, QtWidgets -from avalon import style -from openpype import resources - - -class UserWidget(QtWidgets.QWidget): - - MIN_WIDTH = 300 - - def __init__(self, module): - - super(UserWidget, self).__init__() - - self.module = module - - # Style - icon = QtGui.QIcon(resources.pype_icon_filepath()) - self.setWindowIcon(icon) - self.setWindowTitle("Username Settings") - self.setMinimumWidth(self.MIN_WIDTH) - self.setStyleSheet(style.load_stylesheet()) - - self.setWindowFlags( - QtCore.Qt.WindowCloseButtonHint | - QtCore.Qt.WindowMinimizeButtonHint - ) - - self.setLayout(self._main()) - - def show(self, *args, **kwargs): - super().show(*args, **kwargs) - # Move widget to center of active screen on show - screen = QtWidgets.QApplication.desktop().screen() - screen_center = lambda self: ( - screen.rect().center() - self.rect().center() - ) - self.move(screen_center(self)) - - def _main(self): - main_layout = QtWidgets.QVBoxLayout() - - form_layout = QtWidgets.QFormLayout() - form_layout.setContentsMargins(10, 15, 10, 5) - - label_username = QtWidgets.QLabel("Username:") - label_username.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) - label_username.setTextFormat(QtCore.Qt.RichText) - - input_username = QtWidgets.QLineEdit() - input_username.setPlaceholderText( - QtCore.QCoreApplication.translate("main", "e.g. 
John Smith") - ) - - form_layout.addRow(label_username, input_username) - - btn_save = QtWidgets.QPushButton("Save") - btn_save.clicked.connect(self.click_save) - - btn_cancel = QtWidgets.QPushButton("Cancel") - btn_cancel.clicked.connect(self.close) - - btn_group = QtWidgets.QHBoxLayout() - btn_group.addStretch(1) - btn_group.addWidget(btn_save) - btn_group.addWidget(btn_cancel) - - main_layout.addLayout(form_layout) - main_layout.addLayout(btn_group) - - self.input_username = input_username - - return main_layout - - def set_user(self, username): - self.input_username.setText(username) - - def click_save(self): - # all what should happen - validations and saving into appsdir - username = self.input_username.text() - self.module.change_credentials(username) - self._close_widget() - - def closeEvent(self, event): - event.ignore() - self._close_widget() - - def _close_widget(self): - self.hide() diff --git a/openpype/plugins/load/add_site.py b/openpype/plugins/load/add_site.py new file mode 100644 index 0000000000..09448d553c --- /dev/null +++ b/openpype/plugins/load/add_site.py @@ -0,0 +1,33 @@ +from avalon import api +from openpype.modules import ModulesManager + + +class AddSyncSite(api.Loader): + """Add sync site to representation""" + representations = ["*"] + families = ["*"] + + label = "Add Sync Site" + order = 2 # lower means better + icon = "download" + color = "#999999" + + def load(self, context, name=None, namespace=None, data=None): + self.log.info("Adding {} to representation: {}".format( + data["site_name"], data["_id"])) + self.add_site_to_representation(data["project_name"], + data["_id"], + data["site_name"]) + self.log.debug("Site added.") + + @staticmethod + def add_site_to_representation(project_name, representation_id, site_name): + """Adds new site to representation_id, resets if exists""" + manager = ModulesManager() + sync_server = manager.modules_by_name["sync_server"] + sync_server.add_site(project_name, representation_id, site_name, + force=True) + + def filepath_from_context(self, context): + """No real file loading""" + return "" diff --git a/openpype/plugins/load/delete_old_versions.py b/openpype/plugins/load/delete_old_versions.py index e5132e0f8a..8e3999e9c4 100644 --- a/openpype/plugins/load/delete_old_versions.py +++ b/openpype/plugins/load/delete_old_versions.py @@ -15,11 +15,12 @@ from openpype.api import Anatomy class DeleteOldVersions(api.Loader): - + """Deletes specific number of old version""" representations = ["*"] families = ["*"] label = "Delete Old Versions" + order = 35 icon = "trash" color = "#d8d8d8" @@ -421,8 +422,9 @@ class DeleteOldVersions(api.Loader): class CalculateOldVersions(DeleteOldVersions): - + """Calculate file size of old versions""" label = "Calculate Old Versions" + order = 30 options = [ qargparse.Integer( diff --git a/openpype/plugins/load/remove_site.py b/openpype/plugins/load/remove_site.py new file mode 100644 index 0000000000..aedb5d1f2f --- /dev/null +++ b/openpype/plugins/load/remove_site.py @@ -0,0 +1,33 @@ +from avalon import api +from openpype.modules import ModulesManager + + +class RemoveSyncSite(api.Loader): + """Remove sync site and its files on representation""" + representations = ["*"] + families = ["*"] + + label = "Remove Sync Site" + order = 4 + icon = "download" + color = "#999999" + + def load(self, context, name=None, namespace=None, data=None): + self.log.info("Removing {} on representation: {}".format( + data["site_name"], data["_id"])) + self.remove_site_on_representation(data["project_name"], + 
data["_id"], + data["site_name"]) + self.log.debug("Site added.") + + @staticmethod + def remove_site_on_representation(project_name, representation_id, + site_name): + manager = ModulesManager() + sync_server = manager.modules_by_name["sync_server"] + sync_server.remove_site(project_name, representation_id, + site_name, True) + + def filepath_from_context(self, context): + """No real file loading""" + return "" diff --git a/openpype/plugins/publish/collect_current_pype_user.py b/openpype/plugins/publish/collect_current_pype_user.py index de4e950d56..003c779836 100644 --- a/openpype/plugins/publish/collect_current_pype_user.py +++ b/openpype/plugins/publish/collect_current_pype_user.py @@ -1,6 +1,7 @@ import os import getpass import pyblish.api +from openpype.lib import get_openpype_username class CollectCurrentUserPype(pyblish.api.ContextPlugin): @@ -11,9 +12,6 @@ class CollectCurrentUserPype(pyblish.api.ContextPlugin): label = "Collect Pype User" def process(self, context): - user = os.getenv("OPENPYPE_USERNAME", "").strip() - if not user: - user = context.data.get("user", getpass.getuser()) - + user = get_openpype_username() context.data["user"] = user self.log.debug("Colected user \"{}\"".format(user)) diff --git a/openpype/plugins/publish/collect_hierarchy.py b/openpype/plugins/publish/collect_hierarchy.py index 5c5dbf018c..1aa10fcb9b 100644 --- a/openpype/plugins/publish/collect_hierarchy.py +++ b/openpype/plugins/publish/collect_hierarchy.py @@ -15,7 +15,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): label = "Collect Hierarchy" order = pyblish.api.CollectorOrder - 0.57 families = ["shot"] - hosts = ["resolve"] + hosts = ["resolve", "hiero"] def process(self, context): temp_context = {} @@ -40,7 +40,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): continue # exclude if not masterLayer True - if not instance.data.get("masterLayer"): + if not instance.data.get("heroTrack"): continue # get asset build data if any available @@ -50,7 +50,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin): # suppose that all instances are Shots shot_data['entity_type'] = 'Shot' - shot_data['tasks'] = instance.data.get("tasks") or [] + shot_data['tasks'] = instance.data.get("tasks") or {} shot_data["comments"] = instance.data.get("comments", []) shot_data['custom_attributes'] = { diff --git a/openpype/plugins/publish/collect_otio_frame_ranges.py b/openpype/plugins/publish/collect_otio_frame_ranges.py index 53cc249033..e1b8b95a46 100644 --- a/openpype/plugins/publish/collect_otio_frame_ranges.py +++ b/openpype/plugins/publish/collect_otio_frame_ranges.py @@ -20,7 +20,7 @@ class CollectOcioFrameRanges(pyblish.api.InstancePlugin): label = "Collect OTIO Frame Ranges" order = pyblish.api.CollectorOrder - 0.58 families = ["shot", "clip"] - hosts = ["resolve"] + hosts = ["resolve", "hiero"] def process(self, instance): # get basic variables diff --git a/openpype/plugins/publish/collect_otio_review.py b/openpype/plugins/publish/collect_otio_review.py index 0c7eeaea44..e2375c70c9 100644 --- a/openpype/plugins/publish/collect_otio_review.py +++ b/openpype/plugins/publish/collect_otio_review.py @@ -22,7 +22,7 @@ class CollectOcioReview(pyblish.api.InstancePlugin): label = "Collect OTIO Review" order = pyblish.api.CollectorOrder - 0.57 families = ["clip"] - hosts = ["resolve"] + hosts = ["resolve", "hiero"] def process(self, instance): # get basic variables @@ -88,6 +88,7 @@ class CollectOcioReview(pyblish.api.InstancePlugin): otio_review_clips.append(otio_gap) if otio_review_clips: + 
instance.data["label"] += " (review)" instance.data["families"] += ["review", "ftrack"] instance.data["otioReviewClips"] = otio_review_clips self.log.info( diff --git a/openpype/plugins/publish/collect_otio_subset_resources.py b/openpype/plugins/publish/collect_otio_subset_resources.py index a0c6b9339b..d687c1920a 100644 --- a/openpype/plugins/publish/collect_otio_subset_resources.py +++ b/openpype/plugins/publish/collect_otio_subset_resources.py @@ -19,7 +19,7 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin): label = "Collect OTIO Subset Resources" order = pyblish.api.CollectorOrder - 0.57 families = ["clip"] - hosts = ["resolve"] + hosts = ["resolve", "hiero"] def process(self, instance): if not instance.data.get("representations"): @@ -48,8 +48,8 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin): trimmed_media_range) a_frame_start, a_frame_end = openpype.lib.otio_range_to_frame_range( otio_avalable_range) - a_frame_start_h, a_frame_end_h = openpype.lib.otio_range_to_frame_range( - trimmed_media_range_h) + a_frame_start_h, a_frame_end_h = openpype.lib.\ + otio_range_to_frame_range(trimmed_media_range_h) # fix frame_start and frame_end frame to be in range of media if a_frame_start_h < a_frame_start: @@ -80,6 +80,7 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin): media_ref = otio_clip.media_reference metadata = media_ref.metadata + is_sequence = None # check in two way if it is sequence if hasattr(otio.schema, "ImageSequenceReference"): # for OpenTimelineIO 0.13 and newer @@ -116,7 +117,7 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin): # `ImageSequenceReference` path = media_ref.target_url collection_data = openpype.lib.make_sequence_collection( - path, trimmed_media_range, metadata) + path, trimmed_media_range_h, metadata) self.staging_dir, collection = collection_data self.log.debug(collection) @@ -126,7 +127,7 @@ class CollectOcioSubsetResources(pyblish.api.InstancePlugin): dirname, filename = os.path.split(media_ref.target_url) self.staging_dir = dirname - self.log.debug(path) + self.log.debug(filename) repre = self._create_representation( frame_start, frame_end, file=filename) diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index dd1f09bafa..e263edd931 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -2,7 +2,6 @@ import pyblish.api from avalon import io from copy import deepcopy - class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): """Create entities in Avalon based on collected data.""" @@ -100,13 +99,20 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): if entity: # Do not override data, only update cur_entity_data = entity.get("data") or {} + entity_tasks = cur_entity_data["tasks"] or {} + + # create tasks as dict by default + if not entity_tasks: + cur_entity_data["tasks"] = entity_tasks + new_tasks = data.pop("tasks", {}) if "tasks" not in cur_entity_data and not new_tasks: continue for task_name in new_tasks: - if task_name in cur_entity_data["tasks"].keys(): + if task_name in entity_tasks.keys(): continue - cur_entity_data["tasks"][task_name] = new_tasks[task_name] + cur_entity_data["tasks"][task_name] = new_tasks[ + task_name] cur_entity_data.update(data) data = cur_entity_data else: diff --git a/openpype/plugins/publish/extract_otio_file.py b/openpype/plugins/publish/extract_otio_file.py index 146f3b88ec..3bd217d5d4 100644 --- 
a/openpype/plugins/publish/extract_otio_file.py +++ b/openpype/plugins/publish/extract_otio_file.py @@ -12,7 +12,7 @@ class ExtractOTIOFile(openpype.api.Extractor): label = "Extract OTIO file" order = pyblish.api.ExtractorOrder - 0.45 families = ["workfile"] - hosts = ["resolve"] + hosts = ["resolve", "hiero"] def process(self, instance): # create representation data diff --git a/openpype/plugins/publish/extract_otio_review.py b/openpype/plugins/publish/extract_otio_review.py index 91a680ddb0..07fe6f2731 100644 --- a/openpype/plugins/publish/extract_otio_review.py +++ b/openpype/plugins/publish/extract_otio_review.py @@ -40,8 +40,8 @@ class ExtractOTIOReview(openpype.api.Extractor): order = api.ExtractorOrder - 0.45 label = "Extract OTIO review" - hosts = ["resolve"] families = ["review"] + hosts = ["resolve", "hiero"] # plugin default attributes temp_file_head = "tempFile." @@ -188,7 +188,7 @@ class ExtractOTIOReview(openpype.api.Extractor): # creating and registering representation representation = self._create_representation(start, duration) instance.data["representations"].append(representation) - self.log.info(f"Adding representation: {representation}") + self.log.info("Adding representation: {}".format(representation)) def _create_representation(self, start, duration): """ @@ -388,7 +388,7 @@ class ExtractOTIOReview(openpype.api.Extractor): (int(end_offset + duration) + 1)): seq_number = padding.format(start_frame + index) self.log.debug( - f"index: `{index}` | seq_number: `{seq_number}`") + "index: `{}` | seq_number: `{}`".format(index, seq_number)) new_frames.append(int(seq_number)) new_frames += self.used_frames self.used_frames = new_frames diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 23c8ed2a8e..f341ba197f 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -2,12 +2,18 @@ import os import re import copy import json -import pyblish.api + import clique + +import pyblish.api import openpype.api -import openpype.lib -from openpype.lib import should_decompress, \ - get_decompress_dir, decompress +from openpype.lib import ( + get_ffmpeg_tool_path, + ffprobe_streams, + should_decompress, + get_decompress_dir, + decompress +) class ExtractReview(pyblish.api.InstancePlugin): @@ -43,17 +49,11 @@ class ExtractReview(pyblish.api.InstancePlugin): supported_exts = image_exts + video_exts # FFmpeg tools paths - ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") + ffmpeg_path = get_ffmpeg_tool_path("ffmpeg") # Preset attributes profiles = None - # Legacy attributes - outputs = {} - ext_filter = [] - to_width = 1920 - to_height = 1080 - def process(self, instance): self.log.debug(instance.data["representations"]) # Skip review when requested. @@ -72,10 +72,6 @@ class ExtractReview(pyblish.api.InstancePlugin): ).format(instance_label)) return - # Use legacy processing when `profiles` is not set. 
- if self.profiles is None: - return self.legacy_process(instance) - # Run processing self.main_process(instance) @@ -337,10 +333,24 @@ class ExtractReview(pyblish.api.InstancePlugin): # Get FFmpeg arguments from profile presets out_def_ffmpeg_args = output_def.get("ffmpeg_args") or {} - ffmpeg_input_args = out_def_ffmpeg_args.get("input") or [] - ffmpeg_output_args = out_def_ffmpeg_args.get("output") or [] - ffmpeg_video_filters = out_def_ffmpeg_args.get("video_filters") or [] - ffmpeg_audio_filters = out_def_ffmpeg_args.get("audio_filters") or [] + _ffmpeg_input_args = out_def_ffmpeg_args.get("input") or [] + _ffmpeg_output_args = out_def_ffmpeg_args.get("output") or [] + _ffmpeg_video_filters = out_def_ffmpeg_args.get("video_filters") or [] + _ffmpeg_audio_filters = out_def_ffmpeg_args.get("audio_filters") or [] + + # Cleanup empty strings + ffmpeg_input_args = [ + value for value in _ffmpeg_input_args if value.strip() + ] + ffmpeg_output_args = [ + value for value in _ffmpeg_output_args if value.strip() + ] + ffmpeg_video_filters = [ + value for value in _ffmpeg_video_filters if value.strip() + ] + ffmpeg_audio_filters = [ + value for value in _ffmpeg_audio_filters if value.strip() + ] if isinstance(new_repre['files'], list): input_files_urls = [os.path.join(new_repre["stagingDir"], f) for f @@ -708,6 +718,105 @@ class ExtractReview(pyblish.api.InstancePlugin): return audio_in_args, audio_filters, audio_out_args + def get_letterbox_filters( + self, + letter_box_def, + input_res_ratio, + output_res_ratio, + pixel_aspect, + scale_factor_by_width, + scale_factor_by_height + ): + output = [] + + ratio = letter_box_def["ratio"] + state = letter_box_def["state"] + fill_color = letter_box_def["fill_color"] + f_red, f_green, f_blue, f_alpha = fill_color + fill_color_hex = "{0:0>2X}{1:0>2X}{2:0>2X}".format( + f_red, f_green, f_blue + ) + fill_color_alpha = float(f_alpha) / 255 + + line_thickness = letter_box_def["line_thickness"] + line_color = letter_box_def["line_color"] + l_red, l_green, l_blue, l_alpha = line_color + line_color_hex = "{0:0>2X}{1:0>2X}{2:0>2X}".format( + l_red, l_green, l_blue + ) + line_color_alpha = float(l_alpha) / 255 + + if input_res_ratio == output_res_ratio: + ratio /= pixel_aspect + elif input_res_ratio < output_res_ratio: + ratio /= scale_factor_by_width + else: + ratio /= scale_factor_by_height + + if state == "letterbox": + if fill_color_alpha > 0: + top_box = ( + "drawbox=0:0:iw:round((ih-(iw*(1/{})))/2):t=fill:c={}@{}" + ).format(ratio, fill_color_hex, fill_color_alpha) + + bottom_box = ( + "drawbox=0:ih-round((ih-(iw*(1/{0})))/2)" + ":iw:round((ih-(iw*(1/{0})))/2):t=fill:c={1}@{2}" + ).format(ratio, fill_color_hex, fill_color_alpha) + + output.extend([top_box, bottom_box]) + + if line_color_alpha > 0 and line_thickness > 0: + top_line = ( + "drawbox=0:round((ih-(iw*(1/{0})))/2)-{1}:iw:{1}:" + "t=fill:c={2}@{3}" + ).format( + ratio, line_thickness, line_color_hex, line_color_alpha + ) + bottom_line = ( + "drawbox=0:ih-round((ih-(iw*(1/{})))/2)" + ":iw:{}:t=fill:c={}@{}" + ).format( + ratio, line_thickness, line_color_hex, line_color_alpha + ) + output.extend([top_line, bottom_line]) + + elif state == "pillar": + if fill_color_alpha > 0: + left_box = ( + "drawbox=0:0:round((iw-(ih*{}))/2):ih:t=fill:c={}@{}" + ).format(ratio, fill_color_hex, fill_color_alpha) + + right_box = ( + "drawbox=iw-round((iw-(ih*{0}))/2))" + ":0:round((iw-(ih*{0}))/2):ih:t=fill:c={1}@{2}" + ).format(ratio, fill_color_hex, fill_color_alpha) + + output.extend([left_box, right_box]) + + if 
line_color_alpha > 0 and line_thickness > 0: + left_line = ( + "drawbox=round((iw-(ih*{}))/2):0:{}:ih:t=fill:c={}@{}" + ).format( + ratio, line_thickness, line_color_hex, line_color_alpha + ) + + right_line = ( + "drawbox=iw-round((iw-(ih*{}))/2))" + ":0:{}:ih:t=fill:c={}@{}" + ).format( + ratio, line_thickness, line_color_hex, line_color_alpha + ) + + output.extend([left_line, right_line]) + + else: + raise ValueError( + "Letterbox state \"{}\" is not recognized".format(state) + ) + + return output + def rescaling_filters(self, temp_data, output_def, new_repre): """Prepare vieo filters based on tags in new representation. @@ -719,14 +828,15 @@ class ExtractReview(pyblish.api.InstancePlugin): """ filters = [] - letter_box = output_def.get("letter_box") + letter_box_def = output_def["letter_box"] + letter_box_enabled = letter_box_def["enabled"] # Get instance data pixel_aspect = temp_data["pixel_aspect"] # NOTE Skipped using instance's resolution full_input_path_single_file = temp_data["full_input_path_single_file"] - input_data = openpype.lib.ffprobe_streams( + input_data = ffprobe_streams( full_input_path_single_file, self.log )[0] input_width = int(input_data["width"]) @@ -799,7 +909,7 @@ class ExtractReview(pyblish.api.InstancePlugin): if ( output_width == input_width and output_height == input_height - and not letter_box + and not letter_box_enabled and pixel_aspect == 1 ): self.log.debug( @@ -838,30 +948,24 @@ class ExtractReview(pyblish.api.InstancePlugin): ) # letter_box - if letter_box: - if input_res_ratio == output_res_ratio: - letter_box /= pixel_aspect - elif input_res_ratio < output_res_ratio: - letter_box /= scale_factor_by_width - else: - letter_box /= scale_factor_by_height - - scale_filter = "scale={}x{}:flags=lanczos".format( - output_width, output_height + if letter_box_enabled: + filters.extend([ + "scale={}x{}:flags=lanczos".format( + output_width, output_height + ), + "setsar=1" + ]) + filters.extend( + self.get_letterbox_filters( + letter_box_def, + input_res_ratio, + output_res_ratio, + pixel_aspect, + scale_factor_by_width, + scale_factor_by_height + ) ) - top_box = ( - "drawbox=0:0:iw:round((ih-(iw*(1/{})))/2):t=fill:c=black" - ).format(letter_box) - - bottom_box = ( - "drawbox=0:ih-round((ih-(iw*(1/{0})))/2)" - ":iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black" - ).format(letter_box) - - # Add letter box filters - filters.extend([scale_filter, "setsar=1", top_box, bottom_box]) - # scaling none square pixels and 1920 width if ( input_height != output_height @@ -1253,438 +1357,6 @@ class ExtractReview(pyblish.api.InstancePlugin): return filtered_outputs - def legacy_process(self, instance): - self.log.warning("Legacy review presets are used.") - - output_profiles = self.outputs or {} - - inst_data = instance.data - context_data = instance.context.data - fps = float(inst_data.get("fps")) - frame_start = inst_data.get("frameStart") - frame_end = inst_data.get("frameEnd") - handle_start = inst_data.get("handleStart", - context_data.get("handleStart")) - handle_end = inst_data.get("handleEnd", - context_data.get("handleEnd")) - pixel_aspect = inst_data.get("pixelAspect", 1) - resolution_width = inst_data.get("resolutionWidth", self.to_width) - resolution_height = inst_data.get("resolutionHeight", self.to_height) - self.log.debug("Families In: `{}`".format(inst_data["families"])) - self.log.debug("__ frame_start: {}".format(frame_start)) - self.log.debug("__ frame_end: {}".format(frame_end)) - self.log.debug("__ handle_start: {}".format(handle_start)) - self.log.debug("__ 
handle_end: {}".format(handle_end)) - - # get representation and loop them - representations = inst_data["representations"] - - ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg") - - # filter out mov and img sequences - representations_new = representations[:] - for repre in representations: - - if repre['ext'] not in self.ext_filter: - continue - - tags = repre.get("tags", []) - - if inst_data.get("multipartExr") is True: - # ffmpeg doesn't support multipart exrs - continue - - if "thumbnail" in tags: - continue - - self.log.info("Try repre: {}".format(repre)) - - if "review" not in tags: - continue - - staging_dir = repre["stagingDir"] - - # iterating preset output profiles - for name, profile in output_profiles.items(): - repre_new = repre.copy() - ext = profile.get("ext", None) - p_tags = profile.get('tags', []) - - # append repre tags into profile tags - for t in tags: - if t not in p_tags: - p_tags.append(t) - - self.log.info("p_tags: `{}`".format(p_tags)) - - # adding control for presets to be sequence - # or single file - is_sequence = ("sequence" in p_tags) and (ext in ( - "png", "jpg", "jpeg")) - - # no handles switch from profile tags - no_handles = "no-handles" in p_tags - - self.log.debug("Profile name: {}".format(name)) - - if not ext: - ext = "mov" - self.log.warning( - str("`ext` attribute not in output " - "profile. Setting to default ext: `mov`")) - - self.log.debug( - "instance.families: {}".format( - instance.data['families'])) - self.log.debug( - "profile.families: {}".format(profile['families'])) - - profile_family_check = False - for _family in profile['families']: - if _family in instance.data['families']: - profile_family_check = True - break - - if not profile_family_check: - continue - - if isinstance(repre["files"], list): - collections, remainder = clique.assemble( - repre["files"]) - - full_input_path = os.path.join( - staging_dir, collections[0].format( - '{head}{padding}{tail}') - ) - - filename = collections[0].format('{head}') - if filename.endswith('.'): - filename = filename[:-1] - else: - full_input_path = os.path.join( - staging_dir, repre["files"]) - filename = repre["files"].split(".")[0] - - repr_file = filename + "_{0}.{1}".format(name, ext) - full_output_path = os.path.join( - staging_dir, repr_file) - - if is_sequence: - filename_base = filename + "_{0}".format(name) - repr_file = filename_base + ".%08d.{0}".format( - ext) - repre_new["sequence_file"] = repr_file - full_output_path = os.path.join( - staging_dir, filename_base, repr_file) - - self.log.info("input {}".format(full_input_path)) - self.log.info("output {}".format(full_output_path)) - - new_tags = [x for x in tags if x != "delete"] - - # add families - [instance.data["families"].append(t) - for t in p_tags - if t not in instance.data["families"]] - - # add to - [new_tags.append(t) for t in p_tags - if t not in new_tags] - - self.log.info("new_tags: `{}`".format(new_tags)) - - input_args = [] - output_args = [] - - # overrides output file - input_args.append("-y") - - # preset's input data - input_args.extend(profile.get('input', [])) - - # necessary input data - # adds start arg only if image sequence - - frame_start_handle = frame_start - handle_start - frame_end_handle = frame_end + handle_end - if isinstance(repre["files"], list): - if frame_start_handle != repre.get( - "detectedStart", frame_start_handle): - frame_start_handle = repre.get("detectedStart") - - # exclude handle if no handles defined - if no_handles: - frame_start_handle = frame_start - frame_end_handle = 
frame_end - - input_args.append( - "-start_number {0} -framerate {1}".format( - frame_start_handle, fps)) - else: - if no_handles: - start_sec = float(handle_start) / fps - input_args.append("-ss {:0.2f}".format(start_sec)) - frame_start_handle = frame_start - frame_end_handle = frame_end - - input_args.append("-i {}".format(full_input_path)) - - for audio in instance.data.get("audio", []): - offset_frames = ( - instance.data.get("frameStartFtrack") - - audio["offset"] - ) - offset_seconds = offset_frames / fps - - if offset_seconds > 0: - input_args.append("-ss") - else: - input_args.append("-itsoffset") - - input_args.append(str(abs(offset_seconds))) - - input_args.extend( - ["-i", audio["filename"]] - ) - - # Need to merge audio if there are more - # than 1 input. - if len(instance.data["audio"]) > 1: - input_args.extend( - [ - "-filter_complex", - "amerge", - "-ac", - "2" - ] - ) - - codec_args = profile.get('codec', []) - output_args.extend(codec_args) - # preset's output data - output_args.extend(profile.get('output', [])) - - # defining image ratios - resolution_ratio = ( - float(resolution_width) * pixel_aspect) / resolution_height - delivery_ratio = float(self.to_width) / float(self.to_height) - self.log.debug( - "__ resolution_ratio: `{}`".format(resolution_ratio)) - self.log.debug( - "__ delivery_ratio: `{}`".format(delivery_ratio)) - - # get scale factor - scale_factor = float(self.to_height) / ( - resolution_height * pixel_aspect) - - # shorten two decimals long float number for testing conditions - resolution_ratio_test = float( - "{:0.2f}".format(resolution_ratio)) - delivery_ratio_test = float( - "{:0.2f}".format(delivery_ratio)) - - if resolution_ratio_test != delivery_ratio_test: - scale_factor = float(self.to_width) / ( - resolution_width * pixel_aspect) - if int(scale_factor * 100) == 100: - scale_factor = ( - float(self.to_height) / resolution_height - ) - - self.log.debug("__ scale_factor: `{}`".format(scale_factor)) - - # letter_box - lb = profile.get('letter_box', 0) - if lb != 0: - ffmpeg_width = self.to_width - ffmpeg_height = self.to_height - if "reformat" not in p_tags: - lb /= pixel_aspect - if resolution_ratio_test != delivery_ratio_test: - ffmpeg_width = resolution_width - ffmpeg_height = int( - resolution_height * pixel_aspect) - else: - if resolution_ratio_test != delivery_ratio_test: - lb /= scale_factor - else: - lb /= pixel_aspect - - output_args.append(str( - "-filter:v scale={0}x{1}:flags=lanczos," - "setsar=1,drawbox=0:0:iw:" - "round((ih-(iw*(1/{2})))/2):t=fill:" - "c=black,drawbox=0:ih-round((ih-(iw*(" - "1/{2})))/2):iw:round((ih-(iw*(1/{2})))" - "/2):t=fill:c=black").format( - ffmpeg_width, ffmpeg_height, lb)) - - # In case audio is longer than video. 
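The legacy block above picks between ffmpeg's -ss (seek into the audio) and -itsoffset (delay its start) based on the sign of the frame offset. A self-contained sketch of that arithmetic, with hypothetical sample values:

def audio_offset_args(frame_start_ftrack, audio_offset, fps, filename):
    # positive offset seeks into the audio, negative delays it
    offset_seconds = (frame_start_ftrack - audio_offset) / float(fps)
    flag = "-ss" if offset_seconds > 0 else "-itsoffset"
    return [flag, str(abs(offset_seconds)), "-i", filename]


print(audio_offset_args(1001, 995, 25.0, "dialog.wav"))
# ['-ss', '0.24', '-i', 'dialog.wav']
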
- output_args.append("-shortest") - - if no_handles: - duration_sec = float( - frame_end_handle - frame_start_handle + 1) / fps - - output_args.append("-t {:0.2f}".format(duration_sec)) - - # output filename - output_args.append(full_output_path) - - self.log.debug( - "__ pixel_aspect: `{}`".format(pixel_aspect)) - self.log.debug( - "__ resolution_width: `{}`".format( - resolution_width)) - self.log.debug( - "__ resolution_height: `{}`".format( - resolution_height)) - - # scaling none square pixels and 1920 width - if "reformat" in p_tags: - if resolution_ratio_test < delivery_ratio_test: - self.log.debug("lower then delivery") - width_scale = int(self.to_width * scale_factor) - width_half_pad = int(( - self.to_width - width_scale) / 2) - height_scale = self.to_height - height_half_pad = 0 - else: - self.log.debug("heigher then delivery") - width_scale = self.to_width - width_half_pad = 0 - scale_factor = float(self.to_width) / (float( - resolution_width) * pixel_aspect) - self.log.debug( - "__ scale_factor: `{}`".format( - scale_factor)) - height_scale = int( - resolution_height * scale_factor) - height_half_pad = int( - (self.to_height - height_scale) / 2) - - self.log.debug( - "__ width_scale: `{}`".format(width_scale)) - self.log.debug( - "__ width_half_pad: `{}`".format( - width_half_pad)) - self.log.debug( - "__ height_scale: `{}`".format( - height_scale)) - self.log.debug( - "__ height_half_pad: `{}`".format( - height_half_pad)) - - scaling_arg = str( - "scale={0}x{1}:flags=lanczos," - "pad={2}:{3}:{4}:{5}:black,setsar=1" - ).format(width_scale, height_scale, - self.to_width, self.to_height, - width_half_pad, - height_half_pad - ) - - vf_back = self.add_video_filter_args( - output_args, scaling_arg) - # add it to output_args - output_args.insert(0, vf_back) - - # baking lut file application - lut_path = instance.data.get("lutPath") - if lut_path and ("bake-lut" in p_tags): - # removing Gama info as it is all baked in lut - gamma = next((g for g in input_args - if "-gamma" in g), None) - if gamma: - input_args.remove(gamma) - - # create lut argument - lut_arg = "lut3d=file='{}'".format( - lut_path.replace( - "\\", "/").replace(":/", "\\:/") - ) - lut_arg += ",colormatrix=bt601:bt709" - - vf_back = self.add_video_filter_args( - output_args, lut_arg) - # add it to output_args - output_args.insert(0, vf_back) - self.log.info("Added Lut to ffmpeg command") - self.log.debug( - "_ output_args: `{}`".format(output_args)) - - if is_sequence: - stg_dir = os.path.dirname(full_output_path) - - if not os.path.exists(stg_dir): - self.log.debug( - "creating dir: {}".format(stg_dir)) - os.mkdir(stg_dir) - - mov_args = [ - "\"{}\"".format(ffmpeg_path), - " ".join(input_args), - " ".join(output_args) - ] - subprcs_cmd = " ".join(mov_args) - - # run subprocess - self.log.debug("Executing: {}".format(subprcs_cmd)) - openpype.api.run_subprocess( - subprcs_cmd, shell=True, logger=self.log - ) - - # create representation data - repre_new.update({ - 'name': name, - 'ext': ext, - 'files': repr_file, - "tags": new_tags, - "outputName": name, - "codec": codec_args, - "_profile": profile, - "resolutionHeight": resolution_height, - "resolutionWidth": resolution_width, - "frameStartFtrack": frame_start_handle, - "frameEndFtrack": frame_end_handle - }) - if is_sequence: - repre_new.update({ - "stagingDir": stg_dir, - "files": os.listdir(stg_dir) - }) - if no_handles: - repre_new.update({ - "outputName": name + "_noHandles", - "frameStartFtrack": frame_start, - "frameEndFtrack": frame_end - }) - if 
repre_new.get('preview'): - repre_new.pop("preview") - if repre_new.get('thumbnail'): - repre_new.pop("thumbnail") - - # adding representation - self.log.debug("Adding: {}".format(repre_new)) - representations_new.append(repre_new) - - for repre in representations_new: - if "delete" in repre.get("tags", []): - representations_new.remove(repre) - if "clean_name" in repre.get("tags", []): - repre_new.pop("outputName") - - instance.data.update({ - "reviewToWidth": self.to_width, - "reviewToHeight": self.to_height - }) - - self.log.debug( - "new representations: {}".format(representations_new)) - instance.data["representations"] = representations_new - - self.log.debug("Families Out: `{}`".format(instance.data["families"])) - def add_video_filter_args(self, args, inserting_arg): """ Fixing video filter arguments to be one long string diff --git a/openpype/plugins/publish/extract_scanline_exr.py b/openpype/plugins/publish/extract_scanline_exr.py index 404aa65ac2..a7f7de5188 100644 --- a/openpype/plugins/publish/extract_scanline_exr.py +++ b/openpype/plugins/publish/extract_scanline_exr.py @@ -45,7 +45,7 @@ class ExtractScanlineExr(pyblish.api.InstancePlugin): stagingdir = os.path.normpath(repre.get("stagingDir")) - oiio_tool_path = os.getenv("OPENPYPE_OIIO_PATH", "") + oiio_tool_path = openpype.lib.get_oiio_tools_path() if not os.path.exists(oiio_tool_path): self.log.error( "OIIO tool not found in {}".format(oiio_tool_path)) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 0d36828ccf..ab9b85983b 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -93,7 +93,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "harmony.palette", "editorial", "background", - "camerarig" + "camerarig", + "redshiftproxy" ] exclude_families = ["clip"] db_representation_context_keys = [ @@ -976,6 +977,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): local_site = local_site_id remote_site = sync_server_presets["config"].get("remote_site") + if remote_site == local_site: + remote_site = None + if remote_site == 'local': remote_site = local_site_id diff --git a/openpype/scripts/export_maya_ass_job.py b/openpype/scripts/export_maya_ass_job.py index 6e5eff6663..16e841ce96 100644 --- a/openpype/scripts/export_maya_ass_job.py +++ b/openpype/scripts/export_maya_ass_job.py @@ -54,11 +54,11 @@ def __main__(): print("Got Pype location from environment: {}".format( os.environ.get('OPENPYPE_SETUP_PATH'))) - pype_command = "pype.ps1" + pype_command = "openpype.ps1" if platform.system().lower() == "linux": pype_command = "pype" elif platform.system().lower() == "windows": - pype_command = "pype.bat" + pype_command = "openpype.bat" if kwargs.pype: pype_root = kwargs.pype diff --git a/openpype/settings/__init__.py b/openpype/settings/__init__.py index b4187829fc..b5810deef4 100644 --- a/openpype/settings/__init__.py +++ b/openpype/settings/__init__.py @@ -1,9 +1,13 @@ +from .exceptions import ( + SaveWarningExc +) from .lib import ( get_system_settings, get_project_settings, get_current_project_settings, get_anatomy_settings, - get_environments + get_environments, + get_local_settings ) from .entities import ( SystemSettings, @@ -12,11 +16,14 @@ from .entities import ( __all__ = ( + "SaveWarningExc", + "get_system_settings", "get_project_settings", "get_current_project_settings", "get_anatomy_settings", "get_environments", + "get_local_settings", "SystemSettings", "ProjectSettings" diff --git 
a/openpype/settings/defaults/project_anatomy/attributes.json b/openpype/settings/defaults/project_anatomy/attributes.json index 987021f25b..3ad6761331 100644 --- a/openpype/settings/defaults/project_anatomy/attributes.json +++ b/openpype/settings/defaults/project_anatomy/attributes.json @@ -10,17 +10,17 @@ "resolutionHeight": 1080, "pixelAspect": 1.0, "applications": [ - "maya_2020", - "nuke_12-2", - "nukex_12-2", - "hiero_12-2", - "resolve_16", - "houdini_18-5", - "blender_2-90", - "harmony_20", - "photoshop_2021", - "aftereffects_2021", - "unreal_4-24" + "maya/2020", + "nuke/12-2", + "nukex/12-2", + "hiero/12-2", + "resolve/16", + "houdini/18-5", + "blender/2-91", + "harmony/20", + "photoshop/2021", + "aftereffects/2021", + "unreal/4-24" ], "tools_env": [] } \ No newline at end of file diff --git a/openpype/settings/defaults/project_anatomy/imageio.json b/openpype/settings/defaults/project_anatomy/imageio.json index 4e98463ee4..ff16c22663 100644 --- a/openpype/settings/defaults/project_anatomy/imageio.json +++ b/openpype/settings/defaults/project_anatomy/imageio.json @@ -25,6 +25,9 @@ } }, "nuke": { + "viewer": { + "viewerProcess": "sRGB" + }, "workfile": { "colorManagement": "Nuke", "OCIO_config": "nuke-default", @@ -102,7 +105,7 @@ }, { "name": "tile_color", - "value": "0xff0000ff" + "value": "0xadab1dff" }, { "name": "channels", diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index 9ff551491c..905ba68d60 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -21,7 +21,8 @@ "secondary_pool": "", "group": "", "department": "", - "limit_groups": {} + "limit_groups": {}, + "use_gpu": true }, "HarmonySubmitDeadline": { "enabled": true, diff --git a/openpype/settings/defaults/project_settings/ftrack.json b/openpype/settings/defaults/project_settings/ftrack.json index 03ac8f309f..8970aa8ac8 100644 --- a/openpype/settings/defaults/project_settings/ftrack.json +++ b/openpype/settings/defaults/project_settings/ftrack.json @@ -7,6 +7,14 @@ "not ready" ] }, + "prepare_project": { + "enabled": true, + "role_list": [ + "Pypeclub", + "Administrator", + "Project manager" + ] + }, "sync_hier_entity_attributes": { "enabled": true, "interest_entity_types": [ @@ -195,7 +203,7 @@ "publish": { "IntegrateFtrackNote": { "enabled": true, - "note_with_intent_template": "", + "note_with_intent_template": "{intent}: {comment}", "note_labels": [] }, "ValidateFtrackAttributes": { diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 8081f92ef7..61db35ba79 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -6,7 +6,9 @@ "ExtractJpegEXR": { "enabled": true, "ffmpeg_args": { - "input": [], + "input": [ + "-gamma 2.2" + ], "output": [] } }, @@ -24,11 +26,11 @@ "ftrackreview" ], "ffmpeg_args": { - "video_filters": [], - "audio_filters": [], - "input": [ - "-gamma 2.2" + "video_filters": [ + "eq=gamma=2.2" ], + "audio_filters": [], + "input": [], "output": [ "-pix_fmt yuv420p", "-crf 18", @@ -43,7 +45,25 @@ ] }, "width": 0, - "height": 0 + "height": 0, + "letter_box": { + "enabled": false, + "ratio": 0.0, + "state": "letterbox", + "fill_color": [ + 0, + 0, + 0, + 255 + ], + "line_thickness": 0, + "line_color": [ + 255, + 0, + 0, + 255 + ] + } } } } diff --git 
a/openpype/settings/defaults/project_settings/harmony.json b/openpype/settings/defaults/project_settings/harmony.json index e1de4485a4..f5f084dd44 100644 --- a/openpype/settings/defaults/project_settings/harmony.json +++ b/openpype/settings/defaults/project_settings/harmony.json @@ -14,7 +14,7 @@ "priority": 50, "primary_pool": "", "secondary_pool": "", - "chunk_size": 0 + "chunk_size": 1000000 } } } \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/hiero.json b/openpype/settings/defaults/project_settings/hiero.json index b69bc66457..a8d6472c47 100644 --- a/openpype/settings/defaults/project_settings/hiero.json +++ b/openpype/settings/defaults/project_settings/hiero.json @@ -17,6 +17,18 @@ "handleEnd": 10 } }, + "load": { + "LoadClip": { + "enabled": true, + "families": [ + "render2d", "source", "plate", "render", "review" + ], + "representations": [ + "exr", "dpx", "jpg", "jpeg", "png", "h264", "mov" + ], + "clip_name_template": "{asset}_{subset}_{representation}" + } + }, "publish": { "CollectInstanceVersion": { "enabled": false diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index feddd2860a..8600e49518 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -8,6 +8,13 @@ "yetiRig": "ma" }, "create": { + "CreateLook": { + "enabled": true, + "make_tx": true, + "defaults": [ + "Main" + ] + }, "CreateAnimation": { "enabled": true, "defaults": [ @@ -38,12 +45,6 @@ "Main" ] }, - "CreateLook": { - "enabled": true, - "defaults": [ - "Main" - ] - }, "CreateMayaScene": { "enabled": true, "defaults": [ @@ -313,8 +314,8 @@ "rendererName": "vp2Renderer" }, "Resolution": { - "width": 1080, - "height": 1920, + "width": 1920, + "height": 1080, "percent": 1.0, "mode": "Custom" }, @@ -381,10 +382,6 @@ "optional": true, "active": true, "bake_attributes": [] - }, - "MayaSubmitDeadline": { - "enabled": true, - "tile_assembler_plugin": "DraftTileAssembler" } }, "load": { diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json index 0173eb0a82..bb5232cea7 100644 --- a/openpype/settings/defaults/project_settings/nuke.json +++ b/openpype/settings/defaults/project_settings/nuke.json @@ -6,7 +6,9 @@ "load": "ctrl+alt+l", "manage": "ctrl+alt+m", "build_workfile": "ctrl+alt+b" - } + }, + "open_workfile_at_start": false, + "create_initial_workfile": true }, "create": { "CreateWriteRender": { @@ -84,12 +86,6 @@ }, "ExtractSlateFrame": { "viewer_lut_raw": false - }, - "NukeSubmitDeadline": { - "deadline_priority": 50, - "deadline_pool": "", - "deadline_pool_secondary": "", - "deadline_chunk_size": 1 } }, "load": { diff --git a/openpype/settings/defaults/project_settings/standalonepublisher.json b/openpype/settings/defaults/project_settings/standalonepublisher.json index 08895bcba9..9d40d2ded6 100644 --- a/openpype/settings/defaults/project_settings/standalonepublisher.json +++ b/openpype/settings/defaults/project_settings/standalonepublisher.json @@ -116,7 +116,7 @@ "ExtractThumbnailSP": { "ffmpeg_args": { "input": [ - "gamma 2.2" + "-gamma 2.2" ], "output": [] } diff --git a/openpype/settings/defaults/project_settings/tvpaint.json b/openpype/settings/defaults/project_settings/tvpaint.json new file mode 100644 index 0000000000..4a424b1c03 --- /dev/null +++ b/openpype/settings/defaults/project_settings/tvpaint.json @@ -0,0 +1,20 @@ +{ + "publish": { + 
"ValidateProjectSettings": { + "enabled": true, + "optional": true, + "active": true + }, + "ValidateMarks": { + "enabled": true, + "optional": true, + "active": true + }, + "ValidateAssetName": { + "enabled": true, + "optional": true, + "active": true + } + }, + "filters": {} +} \ No newline at end of file diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index e7c505f184..56d63ecf09 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -6,9 +6,9 @@ "host_name": "maya", "environment": { "PYTHONPATH": [ - "{OPENPYPE_ROOT}/pype/hosts/maya/startup", - "{OPENPYPE_ROOT}/repos/avalon-core/setup/maya", - "{OPENPYPE_ROOT}/repos/maya-look-assigner", + "{OPENPYPE_REPOS_ROOT}/openpype/hosts/maya/startup", + "{OPENPYPE_REPOS_ROOT}/repos/avalon-core/setup/maya", + "{OPENPYPE_REPOS_ROOT}/repos/maya-look-assigner", "{PYTHONPATH}" ], "MAYA_DISABLE_CLIC_IPM": "Yes", @@ -19,9 +19,28 @@ "OPENPYPE_LOG_NO_COLORS": "Yes" }, "variants": { - "maya_2020": { - "enabled": true, - "variant_label": "2020", + "2022": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Autodesk\\Maya2022\\bin\\maya.exe" + ], + "darwin": [], + "linux": [ + "/usr/autodesk/maya2022/bin/maya" + ] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": { + "MAYA_VERSION": "2022" + } + }, + "2020": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Autodesk\\Maya2020\\bin\\maya.exe" @@ -40,9 +59,8 @@ "MAYA_VERSION": "2020" } }, - "maya_2019": { - "enabled": true, - "variant_label": "2019", + "2019": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Autodesk\\Maya2019\\bin\\maya.exe" @@ -61,9 +79,8 @@ "MAYA_VERSION": "2019" } }, - "maya_2018": { - "enabled": true, - "variant_label": "2018", + "2018": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Autodesk\\Maya2018\\bin\\maya.exe" @@ -81,86 +98,9 @@ "environment": { "MAYA_VERSION": "2018" } - } - } - }, - "mayabatch": { - "enabled": true, - "label": "MayaBatch", - "icon": "{}/app_icons/maya.png", - "host_name": "maya", - "environment": { - "PYTHONPATH": [ - "{OPENPYPE_ROOT}/avalon-core/setup/maya", - "{OPENPYPE_ROOT}/maya-look-assigner", - "{PYTHON_ENV}/python2/Lib/site-packages", - "{PYTHONPATH}" - ], - "MAYA_DISABLE_CLIC_IPM": "Yes", - "MAYA_DISABLE_CIP": "Yes", - "MAYA_DISABLE_CER": "Yes", - "PYMEL_SKIP_MEL_INIT": "Yes", - "LC_ALL": "C", - "OPENPYPE_LOG_NO_COLORS": "Yes", - "MAYA_TEST": "{MAYA_VERSION}" - }, - "variants": { - "mayabatch_2020": { - "enabled": true, - "variant_label": "2020", - "executables": { - "windows": [ - "C:\\Program Files\\Autodesk\\Maya2020\\bin\\mayabatch.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": { - "MAYA_VERSION": "2020" - } }, - "mayabatch_2019": { - "enabled": true, - "variant_label": "2019", - "executables": { - "windows": [ - "C:\\Program Files\\Autodesk\\Maya2019\\bin\\mayabatch.exe" - ], - "darwin": [], - "linux": [] - }, - "arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": { - "MAYA_VERSION": "2019" - } - }, - "mayabatch_2018": { - "enabled": true, - "variant_label": "2018", - "executables": { - "windows": [ - "C:\\Program Files\\Autodesk\\Maya2018\\bin\\mayabatch.exe" - ], - "darwin": [], - "linux": [] - }, - 
"arguments": { - "windows": [], - "darwin": [], - "linux": [] - }, - "environment": { - "MAYA_VERSION": "2018" - } + "__dynamic_keys_labels__": { + "2022": "2022 (Testing Only)" } } }, @@ -171,8 +111,8 @@ "host_name": "nuke", "environment": { "NUKE_PATH": [ - "{OPENPYPE_ROOT}/repos/avalon-core/setup/nuke/nuke_path", - "{OPENPYPE_ROOT}/openpype/hosts/nuke/startup", + "{OPENPYPE_REPOS_ROOT}/repos/avalon-core/setup/nuke/nuke_path", + "{OPENPYPE_REPOS_ROOT}/openpype/hosts/nuke/startup", "{OPENPYPE_STUDIO_PLUGINS}/nuke" ], "PATH": { @@ -181,9 +121,26 @@ "LOGLEVEL": "DEBUG" }, "variants": { - "nuke_12-2": { - "enabled": true, - "variant_label": "12.2", + "13-0": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Nuke13.0v1\\Nuke13.0.exe" + ], + "darwin": [], + "linux": [ + "/usr/local/Nuke13.0v1/Nuke13.0" + ] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": {} + }, + "12-2": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke12.2v3\\Nuke12.2.exe" @@ -200,9 +157,8 @@ }, "environment": {} }, - "nuke_12-0": { - "enabled": true, - "variant_label": "12.0", + "12-0": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke12.0v1\\Nuke12.0.exe" @@ -219,9 +175,8 @@ }, "environment": {} }, - "nuke_11-3": { - "enabled": true, - "variant_label": "11.3", + "11-3": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke11.3v1\\Nuke11.3.exe" @@ -238,9 +193,8 @@ }, "environment": {} }, - "nuke_11-2": { - "enabled": true, - "variant_label": "11.2", + "11-2": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke11.2v2\\Nuke11.2.exe" @@ -254,6 +208,13 @@ "linux": [] }, "environment": {} + }, + "__dynamic_keys_labels__": { + "12-2": "12.2", + "12-0": "12.0", + "11-3": "11.3", + "11-2": "11.2", + "13-0": "13.0 (Testing only)" } } }, @@ -264,8 +225,8 @@ "host_name": "nuke", "environment": { "NUKE_PATH": [ - "{OPENPYPE_ROOT}/repos/avalon-core/setup/nuke/nuke_path", - "{OPENPYPE_ROOT}/openpype/hosts/nuke/startup", + "{OPENPYPE_REPOS_ROOT}/repos/avalon-core/setup/nuke/nuke_path", + "{OPENPYPE_REPOS_ROOT}/openpype/hosts/nuke/startup", "{OPENPYPE_STUDIO_PLUGINS}/nuke" ], "PATH": { @@ -274,9 +235,32 @@ "LOGLEVEL": "DEBUG" }, "variants": { - "nukex_12-2": { - "enabled": true, - "variant_label": "12.2", + "13-0": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Nuke13.0v1\\Nuke13.0.exe" + ], + "darwin": [], + "linux": [ + "/usr/local/Nuke13.0v1/Nuke13.0" + ] + }, + "arguments": { + "windows": [ + "--nukex" + ], + "darwin": [ + "--nukex" + ], + "linux": [ + "--nukex" + ] + }, + "environment": {} + }, + "12-2": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke12.2v3\\Nuke12.2.exe" @@ -299,9 +283,8 @@ }, "environment": {} }, - "nukex_12-0": { - "enabled": true, - "variant_label": "12.0", + "12-0": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke12.0v1\\Nuke12.0.exe" @@ -324,9 +307,8 @@ }, "environment": {} }, - "nukex_11-3": { - "enabled": true, - "variant_label": "11.3", + "11-3": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke11.3v1\\Nuke11.3.exe" @@ -349,9 +331,8 @@ }, "environment": {} }, - "nukex_11-2": { - "enabled": true, - "variant_label": "11.2", + "11-2": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke11.2v2\\Nuke11.2.exe" @@ -371,6 +352,13 @@ ] }, "environment": {} + }, + 
"__dynamic_keys_labels__": { + "12-2": "12.2", + "12-0": "12.0", + "11-3": "11.3", + "11-2": "11.2", + "13-0": "13.0 (Testing only)" } } }, @@ -381,7 +369,7 @@ "host_name": "hiero", "environment": { "HIERO_PLUGIN_PATH": [ - "{OPENPYPE_ROOT}/openpype/hosts/hiero/startup" + "{OPENPYPE_REPOS_ROOT}/openpype/hosts/hiero/startup" ], "PATH": { "windows": "C:/Program Files (x86)/QuickTime/QTSystem/;{PATH}" @@ -391,9 +379,32 @@ "LOGLEVEL": "DEBUG" }, "variants": { - "nukestudio_12-2": { - "enabled": true, - "variant_label": "12.2", + "13-0": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Nuke13.0v1\\Nuke13.0.exe" + ], + "darwin": [], + "linux": [ + "/usr/local/Nuke13.0v1/Nuke13.0" + ] + }, + "arguments": { + "windows": [ + "--studio" + ], + "darwin": [ + "--studio" + ], + "linux": [ + "--studio" + ] + }, + "environment": {} + }, + "12-2": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke12.2v3\\Nuke12.2.exe" @@ -416,9 +427,8 @@ }, "environment": {} }, - "nukestudio_12-0": { - "enabled": true, - "variant_label": "12.0", + "12-0": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke12.0v1\\Nuke12.0.exe" @@ -441,9 +451,8 @@ }, "environment": {} }, - "nukestudio_11-3": { - "enabled": true, - "variant_label": "11.3", + "11-3": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke11.3v1\\Nuke11.3.exe" @@ -466,9 +475,8 @@ }, "environment": {} }, - "nukestudio_11-2": { - "enabled": true, - "variant_label": "11.2", + "11-2": { + "use_python_2": true, "executables": { "windows": [], "darwin": [], @@ -486,6 +494,13 @@ ] }, "environment": {} + }, + "__dynamic_keys_labels__": { + "12-2": "12.2", + "12-0": "12.0", + "11-3": "11.3", + "11-2": "11.2", + "13-0": "13.0 (Testing only)" } } }, @@ -496,7 +511,7 @@ "host_name": "hiero", "environment": { "HIERO_PLUGIN_PATH": [ - "{OPENPYPE_ROOT}/openpype/hosts/hiero/startup" + "{OPENPYPE_REPOS_ROOT}/openpype/hosts/hiero/startup" ], "PATH": { "windows": "C:/Program Files (x86)/QuickTime/QTSystem/;{PATH}" @@ -506,9 +521,32 @@ "LOGLEVEL": "DEBUG" }, "variants": { - "hiero_12-2": { - "enabled": true, - "variant_label": "12.2", + "13-0": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Nuke13.0v1\\Nuke13.0.exe" + ], + "darwin": [], + "linux": [ + "/usr/local/Nuke13.0v1/Nuke13.0" + ] + }, + "arguments": { + "windows": [ + "--hiero" + ], + "darwin": [ + "--hiero" + ], + "linux": [ + "--hiero" + ] + }, + "environment": {} + }, + "12-2": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke12.2v3\\Nuke12.2.exe" @@ -531,9 +569,8 @@ }, "environment": {} }, - "hiero_12-0": { - "enabled": true, - "variant_label": "12.0", + "12-0": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke12.0v1\\Nuke12.0.exe" @@ -556,9 +593,8 @@ }, "environment": {} }, - "hiero_11-3": { - "enabled": true, - "variant_label": "11.3", + "11-3": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke11.3v1\\Nuke11.3.exe" @@ -581,9 +617,8 @@ }, "environment": {} }, - "hiero_11-2": { - "enabled": true, - "variant_label": "11.2", + "11-2": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Nuke11.2v2\\Nuke11.2.exe" @@ -603,6 +638,13 @@ ] }, "environment": {} + }, + "__dynamic_keys_labels__": { + "12-2": "12.2", + "12-0": "12.0", + "11-3": "11.3", + "11-2": "11.2", + "13-0": "13.0 (Testing only)" } } }, @@ -636,9 +678,10 @@ "OPENPYPE_LOG_NO_COLORS": "Yes" }, 
"variants": { - "fusion_16": { + "16": { "enabled": true, "variant_label": "16", + "use_python_2": false, "executables": { "windows": [], "darwin": [], @@ -651,9 +694,10 @@ }, "environment": {} }, - "fusion_9": { + "9": { "enabled": true, "variant_label": "9", + "use_python_2": false, "executables": { "windows": [ "C:\\Program Files\\Blackmagic Design\\Fusion 9\\Fusion.exe" @@ -709,14 +753,15 @@ "{PYTHON36_RESOLVE}/Scripts", "{PATH}" ], - "PRE_PYTHON_SCRIPT": "{OPENPYPE_ROOT}/openpype/resolve/preload_console.py", + "PRE_PYTHON_SCRIPT": "{OPENPYPE_REPOS_ROOT}/openpype/resolve/preload_console.py", "OPENPYPE_LOG_NO_COLORS": "True", "RESOLVE_DEV": "True" }, "variants": { - "resolve_16": { + "16": { "enabled": true, "variant_label": "16", + "use_python_2": false, "executables": { "windows": [ "C:/Program Files/Blackmagic Design/DaVinci Resolve/Resolve.exe" @@ -740,20 +785,19 @@ "host_name": "houdini", "environment": { "HOUDINI_PATH": { - "darwin": "{OPENPYPE_ROOT}/openpype/hosts/houdini/startup:&", - "linux": "{OPENPYPE_ROOT}/openpype/hosts/houdini/startup:&", - "windows": "{OPENPYPE_ROOT}/openpype/hosts/houdini/startup;&" + "darwin": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/houdini/startup:&", + "linux": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/houdini/startup:&", + "windows": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/houdini/startup;&" }, "HOUDINI_MENU_PATH": { - "darwin": "{OPENPYPE_ROOT}/openpype/hosts/houdini/startup:&", - "linux": "{OPENPYPE_ROOT}/openpype/hosts/houdini/startup:&", - "windows": "{OPENPYPE_ROOT}/openpype/hosts/houdini/startup;&" + "darwin": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/houdini/startup:&", + "linux": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/houdini/startup:&", + "windows": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/houdini/startup;&" } }, "variants": { - "houdini_18-5": { - "enabled": true, - "variant_label": "18.5", + "18-5": { + "use_python_2": true, "executables": { "windows": [ "C:\\Program Files\\Side Effects Software\\Houdini 18.5.499\\bin\\houdini.exe" @@ -768,9 +812,8 @@ }, "environment": {} }, - "houdini_18": { - "enabled": true, - "variant_label": "18", + "18": { + "use_python_2": true, "executables": { "windows": [], "darwin": [], @@ -783,9 +826,8 @@ }, "environment": {} }, - "houdini_17": { - "enabled": true, - "variant_label": "17", + "17": { + "use_python_2": true, "executables": { "windows": [], "darwin": [], @@ -797,6 +839,11 @@ "linux": [] }, "environment": {} + }, + "__dynamic_keys_labels__": { + "18-5": "18.5", + "18": "18", + "17": "17" } } }, @@ -806,17 +853,16 @@ "icon": "{}/app_icons/blender.png", "host_name": "blender", "environment": { - "BLENDER_USER_SCRIPTS": "{OPENPYPE_ROOT}/repos/avalon-core/setup/blender", + "BLENDER_USER_SCRIPTS": "{OPENPYPE_REPOS_ROOT}/repos/avalon-core/setup/blender", "PYTHONPATH": [ - "{OPENPYPE_ROOT}/repos/avalon-core/setup/blender", + "{OPENPYPE_REPOS_ROOT}/repos/avalon-core/setup/blender", "{PYTHONPATH}" ], - "CREATE_NEW_CONSOLE": "yes" + "QT_PREFERRED_BINDING": "PySide2" }, "variants": { - "blender_2-83": { - "enabled": true, - "variant_label": "2.83", + "2-83": { + "use_python_2": false, "executables": { "windows": [ "C:\\Program Files\\Blender Foundation\\Blender 2.83\\blender.exe" @@ -837,9 +883,8 @@ }, "environment": {} }, - "blender_2-90": { - "enabled": true, - "variant_label": "2.90", + "2-90": { + "use_python_2": false, "executables": { "windows": [ "C:\\Program Files\\Blender Foundation\\Blender 2.90\\blender.exe" @@ -859,6 +904,33 @@ ] }, "environment": {} + }, + "2-91": { + "use_python_2": false, + "executables": { + 
"windows": [ + "C:\\Program Files\\Blender Foundation\\Blender 2.91\\blender.exe" + ], + "darwin": [], + "linux": [] + }, + "arguments": { + "windows": [ + "--python-use-system-env" + ], + "darwin": [ + "--python-use-system-env" + ], + "linux": [ + "--python-use-system-env" + ] + }, + "environment": {} + }, + "__dynamic_keys_labels__": { + "2-83": "2.83", + "2-90": "2.90", + "2-91": "2.91" } } }, @@ -869,12 +941,13 @@ "host_name": "harmony", "environment": { "AVALON_HARMONY_WORKFILES_ON_LAUNCH": "1", - "LIB_OPENHARMONY_PATH": "{OPENPYPE_ROOT}/pype/vendor/OpenHarmony" + "LIB_OPENHARMONY_PATH": "{OPENPYPE_REPOS_ROOT}/pype/vendor/OpenHarmony" }, "variants": { - "harmony_20": { + "20": { "enabled": true, "variant_label": "20", + "use_python_2": false, "executables": { "windows": [], "darwin": [], @@ -887,9 +960,10 @@ }, "environment": {} }, - "harmony_17": { + "17": { "enabled": true, "variant_label": "17", + "use_python_2": false, "executables": { "windows": [], "darwin": [ @@ -915,9 +989,8 @@ "OPENPYPE_LOG_NO_COLORS": "True" }, "variants": { - "tvpaint_animation_11-64bits": { - "enabled": true, - "variant_label": "11 (64bits)", + "animation_11-64bits": { + "use_python_2": false, "executables": { "windows": [ "C:\\Program Files\\TVPaint Developpement\\TVPaint Animation 11 (64bits)\\TVPaint Animation 11 (64bits).exe" @@ -932,9 +1005,8 @@ }, "environment": {} }, - "tvpaint_animation_11-32bits": { - "enabled": true, - "variant_label": "11 (32bits)", + "animation_11-32bits": { + "use_python_2": false, "executables": { "windows": [ "C:\\Program Files (x86)\\TVPaint Developpement\\TVPaint Animation 11 (32bits)\\TVPaint Animation 11 (32bits).exe" @@ -948,6 +1020,10 @@ "linux": [] }, "environment": {} + }, + "__dynamic_keys_labels__": { + "animation_11-64bits": "11 (64bits)", + "animation_11-32bits": "11 (32bits)" } } }, @@ -963,9 +1039,10 @@ "WORKFILES_SAVE_AS": "Yes" }, "variants": { - "photoshop_2020": { + "2020": { "enabled": true, "variant_label": "2020", + "use_python_2": false, "executables": { "windows": [ "C:\\Program Files\\Adobe\\Adobe Photoshop 2020\\Photoshop.exe" @@ -980,9 +1057,10 @@ }, "environment": {} }, - "photoshop_2021": { + "2021": { "enabled": true, "variant_label": "2021", + "use_python_2": false, "executables": { "windows": [ "C:\\Program Files\\Adobe\\Adobe Photoshop 2021\\Photoshop.exe" @@ -1011,9 +1089,10 @@ "WORKFILES_SAVE_AS": "Yes" }, "variants": { - "aftereffects_2020": { + "2020": { "enabled": true, "variant_label": "2020", + "use_python_2": false, "executables": { "windows": [ "" @@ -1028,9 +1107,10 @@ }, "environment": {} }, - "aftereffects_2021": { + "2021": { "enabled": true, "variant_label": "2021", + "use_python_2": false, "executables": { "windows": [ "C:\\Program Files\\Adobe\\Adobe After Effects 2021\\Support Files\\AfterFX.exe" @@ -1053,13 +1133,18 @@ "icon": "app_icons/celaction.png", "host_name": "celaction", "environment": { - "CELACTION_TEMPLATE": "{OPENPYPE_ROOT}/openpype/hosts/celaction/celaction_template_scene.scn" + "CELACTION_TEMPLATE": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/celaction/celaction_template_scene.scn" }, "variants": { - "celation_Local": { + "local": { "enabled": true, "variant_label": "Local", - "executables": "", + "use_python_2": false, + "executables": { + "windows": [], + "darwin": [], + "linux": [] + }, "arguments": { "windows": [], "darwin": [], @@ -1075,14 +1160,12 @@ "icon": "{}/app_icons/ue4.png'", "host_name": "unreal", "environment": { - "AVALON_UNREAL_PLUGIN": "{OPENPYPE_ROOT}/repos/avalon-unreal-integration", - 
"OPENPYPE_LOG_NO_COLORS": "True", - "QT_PREFERRED_BINDING": "PySide" + "AVALON_UNREAL_PLUGIN": "{OPENPYPE_REPOS_ROOT}/repos/avalon-unreal-integration", + "OPENPYPE_LOG_NO_COLORS": "True" }, "variants": { - "unreal_4-24": { - "enabled": true, - "variant_label": "4.24", + "4-26": { + "use_python_2": false, "executables": { "windows": [], "darwin": [], @@ -1101,9 +1184,8 @@ "enabled": true, "environment": {}, "variants": { - "python_python_3-7": { - "enabled": true, - "variant_label": "3.7", + "python_3-7": { + "use_python_2": true, "executables": { "windows": [], "darwin": [], @@ -1116,9 +1198,8 @@ }, "environment": {} }, - "python_python_2-7": { - "enabled": true, - "variant_label": "2.7", + "python_2-7": { + "use_python_2": true, "executables": { "windows": [], "darwin": [], @@ -1131,9 +1212,8 @@ }, "environment": {} }, - "terminal_terminal": { - "enabled": true, - "variant_label": "", + "terminal": { + "use_python_2": true, "executables": { "windows": [], "darwin": [], @@ -1145,6 +1225,10 @@ "linux": [] }, "environment": {} + }, + "__dynamic_keys_labels__": { + "python_3-7": "Python 3.7", + "python_2-7": "Python 2.7" } } }, @@ -1155,9 +1239,8 @@ "host_name": "", "environment": {}, "variants": { - "djvview_1-1": { - "enabled": true, - "variant_label": "1.1", + "1-1": { + "use_python_2": false, "executables": { "windows": [], "darwin": [], @@ -1169,6 +1252,9 @@ "linux": [] }, "environment": {} + }, + "__dynamic_keys_labels__": { + "1-1": "1.1" } } } diff --git a/openpype/settings/defaults/system_settings/general.json b/openpype/settings/defaults/system_settings/general.json index d93d2a0c3a..2568e8b6a8 100644 --- a/openpype/settings/defaults/system_settings/general.json +++ b/openpype/settings/defaults/system_settings/general.json @@ -2,15 +2,9 @@ "studio_name": "Studio name", "studio_code": "stu", "environment": { - "FFMPEG_PATH": { - "windows": "{OPENPYPE_ROOT}/vendor/bin/ffmpeg_exec/windows/bin", - "darwin": "{OPENPYPE_ROOT}/vendor/bin/ffmpeg_exec/darwin/bin", - "linux": ":{OPENPYPE_ROOT}/vendor/bin/ffmpeg_exec/linux" - }, "OPENPYPE_OCIO_CONFIG": "{STUDIO_SOFT}/OpenColorIO-Configs", "__environment_keys__": { "global": [ - "FFMPEG_PATH", "OPENPYPE_OCIO_CONFIG" ] } diff --git a/openpype/settings/defaults/system_settings/modules.json b/openpype/settings/defaults/system_settings/modules.json index 00e98aa8de..6e4b493116 100644 --- a/openpype/settings/defaults/system_settings/modules.json +++ b/openpype/settings/defaults/system_settings/modules.json @@ -126,6 +126,7 @@ }, "timers_manager": { "enabled": true, + "auto_stop": true, "full_time": 15.0, "message_time": 0.5 }, @@ -160,13 +161,7 @@ "log_viewer": { "enabled": true }, - "user": { - "enabled": true - }, "standalonepublish_tool": { "enabled": true - }, - "idle_manager": { - "enabled": true } } \ No newline at end of file diff --git a/openpype/settings/entities/dict_mutable_keys_entity.py b/openpype/settings/entities/dict_mutable_keys_entity.py index cbc80b6409..19eb83072e 100644 --- a/openpype/settings/entities/dict_mutable_keys_entity.py +++ b/openpype/settings/entities/dict_mutable_keys_entity.py @@ -226,7 +226,16 @@ class DictMutableKeysEntity(EndpointEntity): self.is_group = True def schema_validations(self): + # Allow to have not set label if keys are collapsible + # - this it to bypass label validation + used_temp_label = False + if self.is_group and not self.label and self.collapsible_key: + used_temp_label = True + self.label = "LABEL" + super(DictMutableKeysEntity, self).schema_validations() + if used_temp_label: + self.label = 
None if not self.schema_data.get("object_type"): reason = ( @@ -268,21 +277,24 @@ class DictMutableKeysEntity(EndpointEntity): self.on_change() - def _metadata_for_current_state(self): + def _get_metadata_for_state(self, state): if ( - self._override_state is OverrideState.PROJECT + state is OverrideState.PROJECT and self._project_override_value is not NOT_SET ): return self._project_override_metadata if ( - self._override_state >= OverrideState.STUDIO + state >= OverrideState.STUDIO and self._studio_override_value is not NOT_SET ): return self._studio_override_metadata return self._default_metadata + def _metadata_for_current_state(self): + return self._get_metadata_for_state(self._override_state) + def set_override_state(self, state): # Trigger override state change of root if is not same if self.root_item.override_state is not state: @@ -510,6 +522,9 @@ class DictMutableKeysEntity(EndpointEntity): self.had_project_override = value is not NOT_SET def _discard_changes(self, on_change_trigger): + if not self.can_discard_changes: + return + self.set_override_state(self._override_state) on_change_trigger.append(self.on_change) @@ -518,6 +533,9 @@ class DictMutableKeysEntity(EndpointEntity): self.on_change() def _remove_from_studio_default(self, on_change_trigger): + if not self.can_remove_from_studio_default: + return + value = self._default_value if value is NOT_SET: value = self.value_on_not_set @@ -527,13 +545,23 @@ class DictMutableKeysEntity(EndpointEntity): # Simulate `clear` method without triggering value change for key in tuple(self.children_by_key.keys()): - child_obj = self.children_by_key.pop(key) + self.children_by_key.pop(key) + + metadata = self._get_metadata_for_state(OverrideState.DEFAULTS) + metadata_labels = metadata.get(M_DYNAMIC_KEY_LABEL) or {} + children_label_by_id = {} # Create new children for _key, _value in new_value.items(): - child_obj = self._add_key(_key) - child_obj.update_default_value(_value) - child_obj.set_override_state(self._override_state) + child_entity = self._add_key(_key) + child_entity.update_default_value(_value) + label = metadata_labels.get(_key) + if label: + children_label_by_id[child_entity.id] = label + + child_entity.set_override_state(self._override_state) + + self.children_label_by_id = children_label_by_id self._ignore_child_changes = False @@ -546,10 +574,7 @@ class DictMutableKeysEntity(EndpointEntity): self.on_change() def _remove_from_project_override(self, on_change_trigger): - if self._override_state is not OverrideState.PROJECT: - return - - if not self.has_project_override: + if not self.can_remove_from_project_override: return if self._has_studio_override: @@ -565,15 +590,26 @@ class DictMutableKeysEntity(EndpointEntity): # Simulate `clear` method without triggering value change for key in tuple(self.children_by_key.keys()): - child_obj = self.children_by_key.pop(key) + self.children_by_key.pop(key) + + metadata = self._get_metadata_for_state(OverrideState.STUDIO) + metadata_labels = metadata.get(M_DYNAMIC_KEY_LABEL) or {} + children_label_by_id = {} # Create new children for _key, _value in new_value.items(): - child_obj = self._add_key(_key) - child_obj.update_default_value(_value) + child_entity = self._add_key(_key) + child_entity.update_default_value(_value) if self._has_studio_override: - child_obj.update_studio_value(_value) - child_obj.set_override_state(self._override_state) + child_entity.update_studio_value(_value) + + label = metadata_labels.get(_key) + if label: + children_label_by_id[child_entity.id] = label + + 
child_entity.set_override_state(self._override_state) + + self.children_label_by_id = children_label_by_id self._ignore_child_changes = False diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index e28fb7478f..693305cb1e 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -117,7 +117,8 @@ class AppsEnumEntity(BaseEnumEntity): valid_keys = set() enum_items = [] - for app_group in system_settings_entity["applications"].values(): + applications_entity = system_settings_entity["applications"] + for group_name, app_group in applications_entity.items(): enabled_entity = app_group.get("enabled") if enabled_entity and not enabled_entity.value: continue @@ -127,17 +128,29 @@ class AppsEnumEntity(BaseEnumEntity): continue group_label = app_group["label"].value - - for variant_name, variant_entity in app_group["variants"].items(): + variants_entity = app_group["variants"] + for variant_name, variant_entity in variants_entity.items(): enabled_entity = variant_entity.get("enabled") if enabled_entity and not enabled_entity.value: continue - variant_label = variant_entity["variant_label"].value + variant_label = None + if "variant_label" in variant_entity: + variant_label = variant_entity["variant_label"].value + elif hasattr(variants_entity, "get_key_label"): + variant_label = variants_entity.get_key_label(variant_name) - full_label = "{} {}".format(group_label, variant_label) - enum_items.append({variant_name: full_label}) - valid_keys.add(variant_name) + if not variant_label: + variant_label = variant_name + + if group_label: + full_label = "{} {}".format(group_label, variant_label) + else: + full_label = variant_label + + full_name = "/".join((group_name, variant_name)) + enum_items.append({full_name: full_label}) + valid_keys.add(full_name) return enum_items, valid_keys def set_override_state(self, *args, **kwargs): diff --git a/openpype/settings/entities/input_entities.py b/openpype/settings/entities/input_entities.py index e406c7797a..2ca20542ef 100644 --- a/openpype/settings/entities/input_entities.py +++ b/openpype/settings/entities/input_entities.py @@ -251,6 +251,9 @@ class InputEntity(EndpointEntity): self._current_value = copy.deepcopy(value) def _discard_changes(self, on_change_trigger=None): + if not self.can_discard_changes: + return + self._value_is_modified = False if self._override_state >= OverrideState.PROJECT: self._has_project_override = self.had_project_override @@ -286,6 +289,9 @@ class InputEntity(EndpointEntity): self.on_change() def _remove_from_studio_default(self, on_change_trigger): + if not self.can_remove_from_studio_default: + return + value = self._default_value if value is NOT_SET: value = self.value_on_not_set @@ -301,10 +307,7 @@ class InputEntity(EndpointEntity): self.on_change() def _remove_from_project_override(self, on_change_trigger): - if self._override_state is not OverrideState.PROJECT: - return - - if not self._has_project_override: + if not self.can_remove_from_project_override: return self._has_project_override = False @@ -376,7 +379,10 @@ class BoolEntity(InputEntity): def _item_initalization(self): self.valid_value_types = (bool, ) - self.value_on_not_set = True + value_on_not_set = self.convert_to_valid_type( + self.schema_data.get("default", True) + ) + self.value_on_not_set = value_on_not_set class TextEntity(InputEntity): diff --git a/openpype/settings/entities/list_entity.py b/openpype/settings/entities/list_entity.py index c6155b78f8..ee647264b7 
100644 --- a/openpype/settings/entities/list_entity.py +++ b/openpype/settings/entities/list_entity.py @@ -343,7 +343,7 @@ class ListEntity(EndpointEntity): return output def _discard_changes(self, on_change_trigger): - if self._override_state is OverrideState.NOT_DEFINED: + if not self.can_discard_changes: return not_set = object() @@ -405,7 +405,7 @@ class ListEntity(EndpointEntity): self.on_change() def _remove_from_studio_default(self, on_change_trigger): - if self._override_state is not OverrideState.STUDIO: + if not self.can_remove_from_studio_default: return value = self._default_value @@ -433,10 +433,7 @@ class ListEntity(EndpointEntity): self.on_change() def _remove_from_project_override(self, on_change_trigger): - if self._override_state is not OverrideState.PROJECT: - return - - if not self.has_project_override: + if not self.can_remove_from_project_override: return if self._has_studio_override: diff --git a/openpype/settings/entities/root_entities.py b/openpype/settings/entities/root_entities.py index eed3d47f46..b89473d9fb 100644 --- a/openpype/settings/entities/root_entities.py +++ b/openpype/settings/entities/root_entities.py @@ -23,6 +23,7 @@ from openpype.settings.constants import ( PROJECT_ANATOMY_KEY, KEY_REGEX ) +from openpype.settings.exceptions import SaveWarningExc from openpype.settings.lib import ( DEFAULTS_DIR, @@ -724,8 +725,19 @@ class ProjectSettings(RootEntity): project_settings = settings_value.get(PROJECT_SETTINGS_KEY) or {} project_anatomy = settings_value.get(PROJECT_ANATOMY_KEY) or {} - save_project_settings(self.project_name, project_settings) - save_project_anatomy(self.project_name, project_anatomy) + warnings = [] + try: + save_project_settings(self.project_name, project_settings) + except SaveWarningExc as exc: + warnings.extend(exc.warnings) + + try: + save_project_anatomy(self.project_name, project_anatomy) + except SaveWarningExc as exc: + warnings.extend(exc.warnings) + + if warnings: + raise SaveWarningExc(warnings) def _validate_defaults_to_save(self, value): """Valiations of default values before save.""" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_main.json b/openpype/settings/entities/schemas/projects_schema/schema_main.json index 565500edd2..6bc158aa60 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_main.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_main.json @@ -82,6 +82,10 @@ "type": "schema", "name": "schema_project_harmony" }, + { + "type": "schema", + "name": "schema_project_tvpaint" + }, { "type": "schema", "name": "schema_project_celaction" diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index f46221ba63..1346fb3dad 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -128,6 +128,11 @@ "key": "department", "label": "Department" }, + { + "type": "boolean", + "key": "use_gpu", + "label": "Use GPU" + }, { "type": "dict-modifiable", "key": "limit_groups", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json index eefc0e12b7..a801175031 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json +++ 
b/openpype/settings/entities/schemas/projects_schema/schema_project_ftrack.json @@ -36,6 +36,25 @@ } ] }, + { + "type": "dict", + "key": "prepare_project", + "label": "Prepare Project", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "list", + "key": "role_list", + "label": "Roles", + "object_type": "text" + } + ] + }, { "type": "dict", "key": "sync_hier_entity_attributes", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_hiero.json b/openpype/settings/entities/schemas/projects_schema/schema_project_hiero.json index d2191a45a0..f717eff7dd 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_hiero.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_hiero.json @@ -120,6 +120,45 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "load", + "label": "Loader plugins", + "children": [ + { + "type": "dict", + "collapsible": true, + "key": "LoadClip", + "label": "Load Clip", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "list", + "key": "families", + "label": "Families", + "object_type": "text" + }, + { + "type": "list", + "key": "representations", + "label": "Representations", + "object_type": "text" + }, + { + "type": "text", + "key": "clip_name_template", + "label": "Clip name template" + } + ] + } + ] + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json index 75ca5411a1..5022b75719 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_nuke.json @@ -43,6 +43,16 @@ "label": "Build Workfile" } ] + }, + { + "type": "boolean", + "key": "open_workfile_at_start", + "label": "Open Workfile window at start of a Nuke session" + }, + { + "type": "boolean", + "key": "create_initial_workfile", + "label": "Create initial workfile version if none available" } ] }, @@ -87,7 +97,7 @@ "name": "schema_nuke_publish", "template_data": [] }, - { + { "type": "schema", "name": "schema_nuke_load", "template_data": [] @@ -101,4 +111,4 @@ "name": "schema_publish_gui_filter" } ] -} +} \ No newline at end of file diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_syncserver.json b/openpype/settings/entities/schemas/projects_schema/schema_project_syncserver.json index fd728f3982..ea1b8fc9da 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_syncserver.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_syncserver.json @@ -1,7 +1,7 @@ { "type": "dict", "key": "sync_server", - "label": "Sync Server (currently unused)", + "label": "Site Sync (beta testing)", "collapsible": true, "checkbox_key": "enabled", "is_file": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json new file mode 100644 index 0000000000..ab404f03ff --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json @@ -0,0 +1,55 @@ +{ + "type": "dict", + "collapsible": true, + "key": "tvpaint", + "label": "TVPaint", + "is_file": true, + "children": [ + { + "type": "dict", + "collapsible": true, + "key": 
"publish", + "label": "Publish plugins", + "is_file": true, + "children": [ + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateProjectSettings", + "label": "ValidateProjectSettings", + "docstring": "Validate if FPS and Resolution match shot data" + } + ] + }, + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateMarks", + "label": "Validate MarkIn/Out", + "docstring": "Validate MarkIn/Out match Frame start/end on shot data" + } + ] + }, + { + "type": "schema_template", + "name": "template_publish_plugin", + "template_data": [ + { + "key": "ValidateAssetName", + "label": "ValidateAssetName", + "docstring": "Validate if shot on instances metadata is same as workfiles shot" + } + ] + } + ] + }, + { + "type": "schema", + "name": "schema_publish_gui_filter" + } + ] +} diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json index b48f90bd91..edd5c18f51 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_imageio.json @@ -118,6 +118,19 @@ "type": "dict", "label": "Nuke", "children": [ + { + "key": "viewer", + "type": "dict", + "label": "Viewer", + "collapsible": false, + "children": [ + { + "type": "text", + "key": "viewerProcess", + "label": "Viewer Process" + } + ] + }, { "key": "workfile", "type": "dict", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 3c079a130d..1bd028ac79 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -203,6 +203,69 @@ "default": 0, "minimum": 0, "maximum": 100000 + }, + { + "key": "letter_box", + "label": "Letter box", + "type": "dict", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled", + "default": false + }, + { + "key": "ratio", + "label": "Ratio", + "type": "number", + "decimal": 4, + "default": 0, + "minimum": 0, + "maximum": 10000 + }, + { + "key": "state", + "label": "Type", + "type": "enum", + "enum_items": [ + { + "letterbox": "Letterbox" + }, + { + "pillar": "Pillar" + } + ] + }, + { + "type": "schema_template", + "name": "template_rgba_color", + "template_data": [ + { + "label": "Fill Color", + "name": "fill_color" + } + ] + }, + { + "key": "line_thickness", + "label": "Line Thickness", + "type": "number", + "minimum": 0, + "maximum": 1000 + }, + { + "type": "schema_template", + "name": "template_rgba_color", + "template_data": [ + { + "label": "Line Color", + "name": "line_color" + } + ] + } + ] } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 575e04c85d..d728f1def3 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -4,6 +4,31 @@ "key": "create", "label": "Creator plugins", "children": [ + { + "type": "dict", + "collapsible": true, + "key": "CreateLook", + "label": 
"Create Look", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "make_tx", + "label": "Make tx files" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + } + ] + }, { "type": "schema_template", "name": "template_create_plugin", @@ -28,10 +53,6 @@ "key": "CreateLayout", "label": "Create Layout" }, - { - "key": "CreateLook", - "label": "Create Look" - }, { "key": "CreateMayaScene", "label": "Create Maya Scene" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 9d2e39edde..95b02a7936 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -283,34 +283,6 @@ "is_list": true } ] - }, - { - "type": "dict", - "collapsible": true, - "key": "MayaSubmitDeadline", - "label": "Submit maya job to deadline", - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - }, - { - "type": "enum", - "key": "tile_assembler_plugin", - "label": "Tile Assembler Plugin", - "multiselection": false, - "enum_items": [ - { - "DraftTileAssembler": "Draft Tile Assembler" - }, - { - "oiio": "Open Image IO" - } - ] - } - ] } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json index 0e3770ac78..087e6c13a9 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json @@ -146,35 +146,6 @@ "label": "Viewer LUT raw" } ] - }, - { - "type": "dict", - "collapsible": true, - "key": "NukeSubmitDeadline", - "label": "NukeSubmitDeadline", - "is_group": true, - "children": [ - { - "type": "number", - "key": "deadline_priority", - "label": "deadline_priority" - }, - { - "type": "text", - "key": "deadline_pool", - "label": "deadline_pool" - }, - { - "type": "text", - "key": "deadline_pool_secondary", - "label": "deadline_pool_secondary" - }, - { - "type": "number", - "key": "deadline_chunk_size", - "label": "deadline_chunk_size" - } - ] } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/template_rgba_color.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_rgba_color.json new file mode 100644 index 0000000000..ffe530175a --- /dev/null +++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_rgba_color.json @@ -0,0 +1,33 @@ +[ + { + "type": "list-strict", + "key": "{name}", + "label": "{label}", + "object_types": [ + { + "label": "R", + "type": "number", + "minimum": 0, + "maximum": 255 + }, + { + "label": "G", + "type": "number", + "minimum": 0, + "maximum": 255 + }, + { + "label": "B", + "type": "number", + "minimum": 0, + "maximum": 255 + }, + { + "label": "A", + "type": "number", + "minimum": 0, + "maximum": 255 + } + ] + } +] diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_aftereffects.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_aftereffects.json index cd080bf0f2..afadf48173 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_aftereffects.json 
+++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_aftereffects.json @@ -29,13 +29,11 @@ "template_data": [ { "app_variant_label": "2020", - "app_variant": "2020", - "app_name": "aftereffects" + "app_variant": "2020" }, { "app_variant_label": "2021", - "app_variant": "2021", - "app_name": "aftereffects" + "app_variant": "2021" } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_blender.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_blender.json index 2501e94b50..e6e7381e9f 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_blender.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_blender.json @@ -20,26 +20,22 @@ "type": "raw-json" }, { - "type": "dict", + "type": "dict-modifiable", "key": "variants", - "children": [ - { - "type": "schema_template", - "name": "template_host_variant", - "template_data": [ - { - "app_variant_label": "2.83", - "app_variant": "2-83", - "app_name": "blender" - }, - { - "app_variant_label": "2.90", - "app_variant": "2-90", - "app_name": "blender" - } - ] - } - ] + "collapsible_key": true, + "dynamic_label": false, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items" + } + ] + } } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json index fbdad62a92..82be15c3b0 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json @@ -29,10 +29,7 @@ "template_data": [ { "app_variant_label": "Local", - "app_variant": "Local", - "app_name": "celation", - "multiplatform": false, - "multipath_executables": false + "app_variant": "local" } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_djv.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_djv.json index 381437d4ff..a95cedf7c3 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_djv.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_djv.json @@ -20,19 +20,22 @@ "type": "raw-json" }, { - "type": "dict", + "type": "dict-modifiable", "key": "variants", - "children": [ - { - "type": "schema_template", - "name": "template_host_variant", - "template_data": { - "app_variant_label": "1.1", - "app_variant": "1-1", - "app_name": "djvview" + "collapsible_key": true, + "dynamic_label": false, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items" } - } - ] + ] + } } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_fusion.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_fusion.json index 8661916d06..58f37fa99b 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_fusion.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_fusion.json @@ -29,13 +29,11 @@ "template_data": [ { "app_variant_label": "16", - "app_variant": "16", - "app_name": "fusion" + "app_variant": "16" }, { 
"app_variant_label": "9", - "app_variant": "9", - "app_name": "fusion" + "app_variant": "9" } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_harmony.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_harmony.json index 7c59b0febd..083885a53b 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_harmony.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_harmony.json @@ -29,13 +29,11 @@ "template_data": [ { "app_variant_label": "20", - "app_variant": "20", - "app_name": "harmony" + "app_variant": "20" }, { "app_variant_label": "17", - "app_variant": "17", - "app_name": "harmony" + "app_variant": "17" } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_houdini.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_houdini.json index 70e06d170d..22a5b2e737 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_houdini.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_houdini.json @@ -20,31 +20,22 @@ "type": "raw-json" }, { - "type": "dict", + "type": "dict-modifiable", "key": "variants", - "children": [ - { - "type": "schema_template", - "name": "template_host_variant", - "template_data": [ - { - "app_variant_label": "18.5", - "app_variant": "18-5", - "app_name": "houdini" - }, - { - "app_variant_label": "18", - "app_variant": "18", - "app_name": "houdini" - }, - { - "app_variant_label": "17", - "app_variant": "17", - "app_name": "houdini" - } - ] - } - ] + "collapsible_key": true, + "dynamic_label": false, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items" + } + ] + } } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_maya.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_maya.json index 07c8aa0106..7c33671fa7 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_maya.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_maya.json @@ -20,31 +20,22 @@ "type": "raw-json" }, { - "type": "dict", + "type": "dict-modifiable", "key": "variants", - "children": [ - { - "type": "schema_template", - "name": "template_host_variant", - "template_data": [ - { - "app_variant_label": "2020", - "app_variant": "2020", - "app_name": "maya" - }, - { - "app_variant_label": "2019", - "app_variant": "2019", - "app_name": "maya" - }, - { - "app_variant_label": "2018", - "app_variant": "2018", - "app_name": "maya" - } - ] - } - ] + "collapsible_key": true, + "dynamic_label": false, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items" + } + ] + } } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_mayabatch.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_mayabatch.json deleted file mode 100644 index bea59656af..0000000000 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_mayabatch.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "type": "dict", - "key": "mayabatch", - "label": "Autodesk Maya Batch", - "collapsible": true, - "checkbox_key": "enabled", - "children": [ - { - 
"type": "boolean", - "key": "enabled", - "label": "Enabled" - }, - { - "type": "schema_template", - "name": "template_host_unchangables" - }, - { - "key": "environment", - "label": "Environment", - "type": "raw-json" - }, - { - "type": "dict", - "key": "variants", - "children": [ - { - "type": "schema_template", - "name": "template_host_variant", - "template_data": [ - { - "app_variant_label": "2020", - "app_variant": "2020", - "app_name": "mayabatch" - }, - { - "app_variant_label": "2019", - "app_variant": "2019", - "app_name": "mayabatch" - }, - { - "app_variant_label": "2018", - "app_variant": "2018", - "app_name": "mayabatch" - } - ] - } - ] - } - ] -} diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_photoshop.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_photoshop.json index 6f67e29df2..9c21166b63 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_photoshop.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_photoshop.json @@ -29,13 +29,11 @@ "template_data": [ { "app_variant_label": "2020", - "app_variant": "2020", - "app_name": "photoshop" + "app_variant": "2020" }, { "app_variant_label": "2021", - "app_variant": "2021", - "app_name": "photoshop" + "app_variant": "2021" } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_resolve.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_resolve.json index 644e3046ce..ab2b86bf87 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_resolve.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_resolve.json @@ -29,8 +29,7 @@ "template_data": [ { "app_variant_label": "16", - "app_variant": "16", - "app_name": "resolve" + "app_variant": "16" } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_shell.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_shell.json index f2f9376029..e344f98594 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_shell.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_shell.json @@ -16,31 +16,22 @@ "type": "raw-json" }, { - "type": "dict", + "type": "dict-modifiable", "key": "variants", - "children": [ - { - "type": "schema_template", - "name": "template_host_variant", - "template_data": [ - { - "app_variant": "python_3-7", - "app_variant_label": "Python 3.7", - "app_name": "python" - }, - { - "app_variant": "python_2-7", - "app_variant_label": "Python 2.7", - "app_name": "python" - }, - { - "app_variant": "terminal", - "app_variant_label": "Terminal", - "app_name": "terminal" - } - ] - } - ] + "collapsible_key": true, + "dynamic_label": false, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items" + } + ] + } } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_tvpaint.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_tvpaint.json index fa28c4448c..eac09be113 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_tvpaint.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_tvpaint.json @@ -20,26 +20,22 @@ "type": "raw-json" }, { - "type": "dict", + "type": "dict-modifiable", "key": 
"variants", - "children": [ - { - "type": "schema_template", - "name": "template_host_variant", - "template_data": [ - { - "app_variant_label": "Animation 11 (64bits)", - "app_variant": "animation_11-64bits", - "app_name": "tvpaint" - }, - { - "app_variant_label": "Animation 11 (32bits)", - "app_variant": "animation_11-32bits", - "app_name": "tvpaint" - } - ] - } - ] + "collapsible_key": true, + "dynamic_label": false, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items" + } + ] + } } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_unreal.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_unreal.json index e9d1b68130..c5096197d6 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_unreal.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_unreal.json @@ -20,21 +20,22 @@ "type": "raw-json" }, { - "type": "dict", + "type": "dict-modifiable", "key": "variants", - "children": [ - { - "type": "schema_template", - "name": "template_host_variant", - "template_data": [ - { - "app_variant": "4-24", - "app_variant_label": "4.24", - "app_name": "unreal" - } - ] - } - ] + "collapsible_key": true, + "dynamic_label": false, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items" + } + ] + } } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant.json b/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant.json index 10aab06466..33cde3d216 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant.json @@ -1,13 +1,7 @@ [ - { - "__default_values__": { - "multipath_executables": true, - "multiplatform": true - } - }, { "type": "dict", - "key": "{app_name}_{app_variant}", + "key": "{app_variant}", "label": "{app_variant_label}", "collapsible": true, "checkbox_key": "enabled", @@ -21,49 +15,11 @@ "type": "text", "key": "variant_label", "label": "Variant label", - "placeholder": "Only \"Label\" is used if not filled.", - "roles": ["developer"] + "placeholder": "< {app_variant} >" }, { - "type": "path", - "key": "executables", - "label": "Executables", - "multiplatform": "{multiplatform}", - "multipath": "{multipath_executables}" - }, - { - "type":"separator" - }, - { - "type": "dict", - "key": "arguments", - "label": "Arguments", - "use_label_wrap": false, - "children": [ - { - "key": "windows", - "label": "Windows", - "type": "list", - "object_type": "text" - }, - { - "key": "darwin", - "label": "MacOS", - "type": "list", - "object_type": "text" - }, - { - "key": "linux", - "label": "Linux", - "type": "list", - "object_type": "text" - } - ] - }, - { - "key": "environment", - "label": "Environment", - "type": "raw-json" + "type": "schema_template", + "name": "template_host_variant_items" } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant_items.json b/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant_items.json new file mode 100644 index 0000000000..472840d8fc --- /dev/null +++ 
b/openpype/settings/entities/schemas/system_schema/host_settings/template_host_variant_items.json @@ -0,0 +1,49 @@ +[ + { + "type": "boolean", + "key": "use_python_2", + "label": "Use Python 2", + "default": false + }, + { + "type": "path", + "key": "executables", + "label": "Executables", + "multiplatform": true, + "multipath": true + }, + { + "type":"separator" + }, + { + "type": "dict", + "key": "arguments", + "label": "Arguments", + "use_label_wrap": false, + "children": [ + { + "key": "windows", + "label": "Windows", + "type": "list", + "object_type": "text" + }, + { + "key": "darwin", + "label": "MacOS", + "type": "list", + "object_type": "text" + }, + { + "key": "linux", + "label": "Linux", + "type": "list", + "object_type": "text" + } + ] + }, + { + "key": "environment", + "label": "Environment", + "type": "raw-json" + } +] diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/template_nuke.json b/openpype/settings/entities/schemas/system_schema/host_settings/template_nuke.json index d99e0b9a85..3f25c7d72f 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/template_nuke.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/template_nuke.json @@ -21,36 +21,22 @@ "type": "raw-json" }, { - "type": "dict", + "type": "dict-modifiable", "key": "variants", - "children": [ - { - "type": "schema_template", - "name": "template_host_variant", - "template_data": [ - { - "app_variant": "12-2", - "app_variant_label": "12.2", - "app_name": "{nuke_type}" - }, - { - "app_variant": "12-0", - "app_variant_label": "12.0", - "app_name": "{nuke_type}" - }, - { - "app_variant": "11-3", - "app_variant_label": "11.3", - "app_name": "{nuke_type}" - }, - { - "app_variant": "11-2", - "app_variant_label": "11.2", - "app_name": "{nuke_type}" - } - ] - } - ] + "collapsible_key": true, + "dynamic_label": false, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items" + } + ] + } } ] } diff --git a/openpype/settings/entities/schemas/system_schema/schema_applications.json b/openpype/settings/entities/schemas/system_schema/schema_applications.json index 61d47df8b6..efdd021ede 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_applications.json +++ b/openpype/settings/entities/schemas/system_schema/schema_applications.json @@ -9,10 +9,6 @@ "type": "schema", "name": "schema_maya" }, - { - "type": "schema", - "name": "schema_mayabatch" - }, { "type": "schema_template", "name": "template_nuke", diff --git a/openpype/settings/entities/schemas/system_schema/schema_modules.json b/openpype/settings/entities/schemas/system_schema/schema_modules.json index 8bfb0e90dc..878958b12d 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_modules.json +++ b/openpype/settings/entities/schemas/system_schema/schema_modules.json @@ -42,6 +42,11 @@ "key": "enabled", "label": "Enabled" }, + { + "type": "boolean", + "key": "auto_stop", + "label": "Auto stop timer" + }, { "type": "number", "decimal": 2, @@ -77,7 +82,7 @@ }, { "type": "dict", "key": "sync_server", - "label": "Sync Server", + "label": "Site Sync", "collapsible": true, "checkbox_key": "enabled", "children": [{ @@ -149,20 +154,6 @@ } ] }, - { - "type": "dict", - "key": "user", - "label": "User setting", - "collapsible": true, - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" 
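The host schemas above all switch `variants` from a fixed `dict` to `dict-modifiable` built on the shared `template_host_variant_items` template, so studios can add or remove application versions as plain keys. A minimal sketch of how such variants can be enumerated into the `group/variant` full names used by the new `AppsEnumEntity` code, assuming simplified plain-dict settings data:

# Minimal sketch, assuming simplified plain-dict settings data.
def app_full_names(applications):
    # Yield (full_name, label) for every enabled variant of every enabled group.
    for group_name, group in applications.items():
        if not group.get("enabled", True):
            continue
        group_label = group.get("label") or group_name
        for variant_name, variant in group["variants"].items():
            if not variant.get("enabled", True):
                continue
            # Variant keys are dynamic now, so apps are addressed as "group/variant".
            full_name = "/".join((group_name, variant_name))
            yield full_name, "{} {}".format(group_label, variant_name)

applications = {
    "maya": {"label": "Autodesk Maya", "variants": {"2020": {}, "2019": {}}},
    "unreal": {"label": "Unreal Editor", "variants": {"4-26": {}}},
}
print(dict(app_full_names(applications)))
# {'maya/2020': 'Autodesk Maya 2020', 'maya/2019': 'Autodesk Maya 2019',
#  'unreal/4-26': 'Unreal Editor 4-26'}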
- } - ] - }, { "type": "dict", "key": "standalonepublish_tool", @@ -176,20 +167,6 @@ "label": "Enabled" } ] - }, - { - "type": "dict", - "key": "idle_manager", - "label": "Idle Manager", - "collapsible": true, - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - } - ] } ] } diff --git a/openpype/settings/exceptions.py b/openpype/settings/exceptions.py new file mode 100644 index 0000000000..a06138eeaf --- /dev/null +++ b/openpype/settings/exceptions.py @@ -0,0 +1,11 @@ +class SaveSettingsValidation(Exception): + pass + + +class SaveWarningExc(SaveSettingsValidation): + def __init__(self, warnings): + if isinstance(warnings, str): + warnings = [warnings] + self.warnings = warnings + msg = " | ".join(warnings) + super(SaveWarningExc, self).__init__(msg) diff --git a/openpype/settings/lib.py b/openpype/settings/lib.py index 60a51c01a0..f61166fa69 100644 --- a/openpype/settings/lib.py +++ b/openpype/settings/lib.py @@ -4,6 +4,9 @@ import functools import logging import platform import copy +from .exceptions import ( + SaveWarningExc +) from .constants import ( M_OVERRIDEN_KEY, M_ENVIRONMENT_KEY, @@ -101,8 +104,14 @@ def save_studio_settings(data): For saving of data cares registered Settings handler. + Warning messages are not logged, as the module raising them should log + them within its logger. + Args: data(dict): Overrides data with metadata defying studio overrides. + + Raises: + SaveWarningExc: If any module raises the exception. """ # Notify Pype modules from openpype.modules import ModulesManager, ISettingsChangeListener @@ -110,15 +119,25 @@ old_data = get_system_settings() default_values = get_default_settings()[SYSTEM_SETTINGS_KEY] new_data = apply_overrides(default_values, copy.deepcopy(data)) + new_data_with_metadata = copy.deepcopy(new_data) clear_metadata_from_settings(new_data) changes = calculate_changes(old_data, new_data) modules_manager = ModulesManager(_system_settings=new_data) + + warnings = [] for module in modules_manager.get_enabled_modules(): if isinstance(module, ISettingsChangeListener): - module.on_system_settings_save(old_data, new_data, changes) + try: + module.on_system_settings_save( + old_data, new_data, changes, new_data_with_metadata + ) + except SaveWarningExc as exc: + warnings.extend(exc.warnings) - return _SETTINGS_HANDLER.save_studio_settings(data) + _SETTINGS_HANDLER.save_studio_settings(data) + if warnings: + raise SaveWarningExc(warnings) @require_handler @@ -130,10 +149,16 @@ def save_project_settings(project_name, overrides): For saving of data cares registered Settings handler. + Warning messages are not logged, as the module raising them should log + them within its logger. + Args: project_name (str): Project name for which overrides are passed. Default project's value is None. overrides(dict): Overrides data with metadata defying studio overrides. + + Raises: + SaveWarningExc: If any module raises the exception. 
""" # Notify Pype modules from openpype.modules import ModulesManager, ISettingsChangeListener @@ -151,17 +176,29 @@ def save_project_settings(project_name, overrides): old_data = get_default_project_settings(exclude_locals=True) new_data = apply_overrides(default_values, copy.deepcopy(overrides)) + new_data_with_metadata = copy.deepcopy(new_data) clear_metadata_from_settings(new_data) changes = calculate_changes(old_data, new_data) modules_manager = ModulesManager() + warnings = [] for module in modules_manager.get_enabled_modules(): if isinstance(module, ISettingsChangeListener): - module.on_project_settings_save( - old_data, new_data, project_name, changes - ) + try: + module.on_project_settings_save( + old_data, + new_data, + project_name, + changes, + new_data_with_metadata + ) + except SaveWarningExc as exc: + warnings.extend(exc.warnings) - return _SETTINGS_HANDLER.save_project_settings(project_name, overrides) + _SETTINGS_HANDLER.save_project_settings(project_name, overrides) + + if warnings: + raise SaveWarningExc(warnings) @require_handler @@ -173,10 +210,16 @@ def save_project_anatomy(project_name, anatomy_data): For saving of data cares registered Settings handler. + Warning messages are not logged as module raising them should log it within + it's logger. + Args: project_name (str): Project name for which overrides are passed. Default project's value is None. overrides(dict): Overrides data with metadata defying studio overrides. + + Raises: + SaveWarningExc: If any module raises the exception. """ # Notify Pype modules from openpype.modules import ModulesManager, ISettingsChangeListener @@ -194,17 +237,29 @@ def save_project_anatomy(project_name, anatomy_data): old_data = get_default_anatomy_settings(exclude_locals=True) new_data = apply_overrides(default_values, copy.deepcopy(anatomy_data)) + new_data_with_metadata = copy.deepcopy(new_data) clear_metadata_from_settings(new_data) changes = calculate_changes(old_data, new_data) modules_manager = ModulesManager() + warnings = [] for module in modules_manager.get_enabled_modules(): if isinstance(module, ISettingsChangeListener): - module.on_project_anatomy_save( - old_data, new_data, changes, project_name - ) + try: + module.on_project_anatomy_save( + old_data, + new_data, + changes, + project_name, + new_data_with_metadata + ) + except SaveWarningExc as exc: + warnings.extend(exc.warnings) - return _SETTINGS_HANDLER.save_project_anatomy(project_name, anatomy_data) + _SETTINGS_HANDLER.save_project_anatomy(project_name, anatomy_data) + + if warnings: + raise SaveWarningExc(warnings) @require_handler @@ -489,7 +544,7 @@ def apply_local_settings_on_system_settings(system_settings, local_settings): # TODO This is temporary fix until launch arguments will be stored # per platform and not per executable. 
# - local settings store only executable - new_executables = [[executable, ""]] + new_executables = [executable] new_executables.extend(platform_executables) variants[app_name]["executables"] = new_executables @@ -645,13 +700,22 @@ def apply_local_settings_on_project_settings( sync_server_config["remote_site"] = remote_site -def get_system_settings(clear_metadata=True): +def get_system_settings(clear_metadata=True, exclude_locals=None): """System settings with applied studio overrides.""" default_values = get_default_settings()[SYSTEM_SETTINGS_KEY] studio_values = get_studio_system_settings_overrides() result = apply_overrides(default_values, studio_values) + + # Clear overrides metadata from settings if clear_metadata: clear_metadata_from_settings(result) + + # Apply local settings + # Default behavior is based on `clear_metadata` value + if exclude_locals is None: + exclude_locals = not clear_metadata + + if not exclude_locals: # TODO local settings may be required to apply for environments local_settings = get_local_settings() apply_local_settings_on_system_settings(result, local_settings) @@ -659,40 +723,52 @@ def get_system_settings(clear_metadata=True): return result -def get_default_project_settings(clear_metadata=True, exclude_locals=False): +def get_default_project_settings(clear_metadata=True, exclude_locals=None): """Project settings with applied studio's default project overrides.""" default_values = get_default_settings()[PROJECT_SETTINGS_KEY] studio_values = get_studio_project_settings_overrides() result = apply_overrides(default_values, studio_values) + # Clear overrides metadata from settings if clear_metadata: clear_metadata_from_settings(result) - if not exclude_locals: - local_settings = get_local_settings() - apply_local_settings_on_project_settings( - result, local_settings, None - ) + + # Apply local settings + if exclude_locals is None: + exclude_locals = not clear_metadata + + if not exclude_locals: + local_settings = get_local_settings() + apply_local_settings_on_project_settings( + result, local_settings, None + ) return result -def get_default_anatomy_settings(clear_metadata=True, exclude_locals=False): +def get_default_anatomy_settings(clear_metadata=True, exclude_locals=None): """Project anatomy data with applied studio's default project overrides.""" default_values = get_default_settings()[PROJECT_ANATOMY_KEY] studio_values = get_studio_project_anatomy_overrides() - # TODO uncomment and remove hotfix result when overrides of anatomy - # are stored correctly. 
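The settings getters above gain a consistent `exclude_locals=None` signature: when the caller does not decide explicitly, local settings are applied exactly when metadata is also cleared. A minimal sketch of that resolution, with hypothetical stand-ins for the clear/apply helpers:

# Minimal sketch; pop()/flag assignment are hypothetical stand-ins for
# clear_metadata_from_settings() and apply_local_settings_on_*().
def resolve_settings(result, clear_metadata=True, exclude_locals=None):
    if clear_metadata:
        result.pop("__metadata__", None)

    # Default behavior is based on `clear_metadata` value.
    if exclude_locals is None:
        exclude_locals = not clear_metadata

    if not exclude_locals:
        result["locals_applied"] = True
    return result

print(resolve_settings({"__metadata__": {}, "value": 1}))
# {'value': 1, 'locals_applied': True}
print(resolve_settings({"__metadata__": {}, "value": 1}, clear_metadata=False))
# {'__metadata__': {}, 'value': 1}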
result = apply_overrides(default_values, studio_values) + # Clear overrides metadata from settings if clear_metadata: clear_metadata_from_settings(result) - if not exclude_locals: - local_settings = get_local_settings() - apply_local_settings_on_anatomy_settings( - result, local_settings, None - ) + + # Apply local settings + if exclude_locals is None: + exclude_locals = not clear_metadata + + if not exclude_locals: + local_settings = get_local_settings() + apply_local_settings_on_anatomy_settings( + result, local_settings, None + ) return result -def get_anatomy_settings(project_name, site_name=None, exclude_locals=False): +def get_anatomy_settings( + project_name, site_name=None, clear_metadata=True, exclude_locals=None +): """Project anatomy data with applied studio and project overrides.""" if not project_name: raise ValueError( @@ -709,7 +785,13 @@ def get_anatomy_settings(project_name, site_name=None, exclude_locals=False): for key, value in project_overrides.items(): result[key] = value - clear_metadata_from_settings(result) + # Clear overrides metadata from settings + if clear_metadata: + clear_metadata_from_settings(result) + + # Apply local settings + if exclude_locals is None: + exclude_locals = not clear_metadata if not exclude_locals: local_settings = get_local_settings() @@ -719,7 +801,9 @@ def get_anatomy_settings(project_name, site_name=None, exclude_locals=False): return result -def get_project_settings(project_name, exclude_locals=False): +def get_project_settings( + project_name, clear_metadata=True, exclude_locals=None +): """Project settings with applied studio and project overrides.""" if not project_name: raise ValueError( @@ -733,7 +817,14 @@ def get_project_settings(project_name, exclude_locals=False): ) result = apply_overrides(studio_overrides, project_overrides) - clear_metadata_from_settings(result) + + # Clear overrides metadata from settings + if clear_metadata: + clear_metadata_from_settings(result) + + # Apply local settings + if exclude_locals is None: + exclude_locals = not clear_metadata if not exclude_locals: local_settings = get_local_settings() diff --git a/openpype/tools/launcher/actions.py b/openpype/tools/launcher/actions.py index 6261fe91ca..72c7aece72 100644 --- a/openpype/tools/launcher/actions.py +++ b/openpype/tools/launcher/actions.py @@ -1,7 +1,7 @@ import os -import importlib -from avalon import api, lib, style +from avalon import api, style +from openpype import PLUGINS_DIR from openpype.api import Logger, resources from openpype.lib import ( ApplictionExecutableNotFound, @@ -10,81 +10,6 @@ from openpype.lib import ( from Qt import QtWidgets, QtGui -class ProjectManagerAction(api.Action): - name = "projectmanager" - label = "Project Manager" - icon = "gear" - order = 999 # at the end - - def is_compatible(self, session): - return "AVALON_PROJECT" in session - - def process(self, session, **kwargs): - return lib.launch( - executable="python", - args=[ - "-u", "-m", "avalon.tools.projectmanager", - session['AVALON_PROJECT'] - ] - ) - - -class LoaderAction(api.Action): - name = "loader" - label = "Loader" - icon = "cloud-download" - order = 998 - - def is_compatible(self, session): - return "AVALON_PROJECT" in session - - def process(self, session, **kwargs): - return lib.launch( - executable="python", - args=[ - "-u", "-m", "avalon.tools.loader", session['AVALON_PROJECT'] - ] - ) - - -class LoaderLibrary(api.Action): - name = "loader_os" - label = "Library Loader" - icon = "book" - order = 997 # at the end - - def is_compatible(self, 
session): - return True - - def process(self, session, **kwargs): - return lib.launch( - executable="python", - args=["-u", "-m", "avalon.tools.libraryloader"] - ) - - -def register_default_actions(): - """Register default actions for Launcher""" - api.register_plugin(api.Action, ProjectManagerAction) - api.register_plugin(api.Action, LoaderAction) - api.register_plugin(api.Action, LoaderLibrary) - - -def register_config_actions(): - """Register actions from the configuration for Launcher""" - - module_name = os.environ["AVALON_CONFIG"] - config = importlib.import_module(module_name) - if not hasattr(config, "register_launcher_actions"): - print( - "Current configuration `%s` has no 'register_launcher_actions'" - % config.__name__ - ) - return - - config.register_launcher_actions() - - def register_actions_from_paths(paths): if not paths: return @@ -106,6 +31,13 @@ def register_actions_from_paths(paths): api.register_plugin_path(api.Action, path) +def register_config_actions(): + """Register actions from the configuration for Launcher""" + + actions_dir = os.path.join(PLUGINS_DIR, "actions") + register_actions_from_paths([actions_dir]) + + def register_environment_actions(): """Register actions from AVALON_ACTIONS for Launcher.""" diff --git a/openpype/tools/launcher/window.py b/openpype/tools/launcher/window.py index a89e724f1c..c0aeec7d2f 100644 --- a/openpype/tools/launcher/window.py +++ b/openpype/tools/launcher/window.py @@ -11,6 +11,7 @@ from avalon.tools import lib as tools_lib from avalon.tools.widgets import AssetWidget from avalon.vendor import qtawesome from .models import ProjectModel +from .lib import get_action_label from .widgets import ( ProjectBar, ActionBar, @@ -407,7 +408,7 @@ class LauncherWindow(QtWidgets.QDialog): self.discover_actions() def on_action_clicked(self, action): - self.echo("Running action: {}".format(action.name)) + self.echo("Running action: {}".format(get_action_label(action))) self.run_action(action) def on_history_action(self, history_data): diff --git a/openpype/tools/settings/local_settings/apps_widget.py b/openpype/tools/settings/local_settings/apps_widget.py index bc27a3c1c4..2e12c010d1 100644 --- a/openpype/tools/settings/local_settings/apps_widget.py +++ b/openpype/tools/settings/local_settings/apps_widget.py @@ -10,12 +10,20 @@ from .constants import CHILD_OFFSET class AppVariantWidget(QtWidgets.QWidget): exec_placeholder = "< Specific path for this machine >" - def __init__(self, group_label, variant_entity, parent): + def __init__(self, group_label, variant_name, variant_entity, parent): super(AppVariantWidget, self).__init__(parent) self.executable_input_widget = None + variant_label = variant_entity.label + if variant_label is None: + parent_entity = variant_entity.parent + if hasattr(parent_entity, "get_key_label"): + variant_label = parent_entity.get_key_label(variant_name) - label = " ".join([group_label, variant_entity.label]) + if not variant_label: + variant_label = variant_name + + label = " ".join([group_label, variant_label]) expading_widget = ExpandingWidget(label, self) content_widget = QtWidgets.QWidget(expading_widget) @@ -102,7 +110,7 @@ class AppGroupWidget(QtWidgets.QWidget): valid_variants = {} for key, entity in group_entity["variants"].items(): - if entity["enabled"].value: + if "enabled" not in entity or entity["enabled"].value: valid_variants[key] = entity group_label = group_entity.label @@ -114,7 +122,7 @@ class AppGroupWidget(QtWidgets.QWidget): widgets_by_variant_name = {} for variant_name, variant_entity in 
valid_variants.items(): variant_widget = AppVariantWidget( - group_label, variant_entity, content_widget + group_label, variant_name, variant_entity, content_widget ) widgets_by_variant_name[variant_name] = variant_widget content_layout.addWidget(variant_widget) @@ -173,7 +181,10 @@ class LocalApplicationsWidgets(QtWidgets.QWidget): # Check if has enabled any variant enabled_variant = False for variant_entity in entity["variants"].values(): - if variant_entity["enabled"].value: + if ( + "enabled" not in variant_entity + or variant_entity["enabled"].value + ): enabled_variant = True break diff --git a/openpype/tools/settings/local_settings/general_widget.py b/openpype/tools/settings/local_settings/general_widget.py index 7732157122..78bc53fdd2 100644 --- a/openpype/tools/settings/local_settings/general_widget.py +++ b/openpype/tools/settings/local_settings/general_widget.py @@ -1,3 +1,5 @@ +import getpass + from Qt import QtWidgets @@ -5,28 +7,29 @@ class LocalGeneralWidgets(QtWidgets.QWidget): def __init__(self, parent): super(LocalGeneralWidgets, self).__init__(parent) - local_site_name_input = QtWidgets.QLineEdit(self) + username_input = QtWidgets.QLineEdit(self) + username_input.setPlaceholderText(getpass.getuser()) layout = QtWidgets.QFormLayout(self) layout.setContentsMargins(0, 0, 0, 0) - layout.addRow("Local site label", local_site_name_input) + layout.addRow("OpenPype Username", username_input) - self.local_site_name_input = local_site_name_input + self.username_input = username_input def update_local_settings(self, value): - site_label = "" + username = "" if value: - site_label = value.get("site_label", site_label) - self.local_site_name_input.setText(site_label) + username = value.get("username", username) + self.username_input.setText(username) def settings_value(self): # Add changed # If these have changed then output = {} - local_site_name = self.local_site_name_input.text() - if local_site_name: - output["site_label"] = local_site_name + username = self.username_input.text() + if username: + output["username"] = username # Do not return output yet since we don't have mechanism to save or # load these data through api calls return output diff --git a/openpype/tools/settings/settings/style/__init__.py b/openpype/tools/settings/settings/style/__init__.py index 9bb5e851b4..5a57642ee1 100644 --- a/openpype/tools/settings/settings/style/__init__.py +++ b/openpype/tools/settings/settings/style/__init__.py @@ -1,4 +1,5 @@ import os +from openpype import resources def load_stylesheet(): @@ -9,4 +10,4 @@ def load_stylesheet(): def app_icon_path(): - return os.path.join(os.path.dirname(__file__), "openpype_icon.png") + return resources.pype_icon_filepath() diff --git a/openpype/tools/settings/settings/style/pype_icon.png b/openpype/tools/settings/settings/style/pype_icon.png deleted file mode 100644 index bfacf6eeed..0000000000 Binary files a/openpype/tools/settings/settings/style/pype_icon.png and /dev/null differ diff --git a/openpype/tools/settings/settings/widgets/categories.py b/openpype/tools/settings/settings/widgets/categories.py index 9d286485a3..e4832c989a 100644 --- a/openpype/tools/settings/settings/widgets/categories.py +++ b/openpype/tools/settings/settings/widgets/categories.py @@ -27,7 +27,7 @@ from openpype.settings.entities import ( SchemaError ) -from openpype.settings.lib import get_system_settings +from openpype.settings import SaveWarningExc from .widgets import ProjectListWidget from . 
import lib @@ -272,6 +272,22 @@ class SettingsCategoryWidget(QtWidgets.QWidget): # not required. self.reset() + except SaveWarningExc as exc: + warnings = [ + "Settings were saved but a few issues happened." + ] + for item in exc.warnings: + warnings.append(item.replace("\n", "<br>")) + + msg = "<br><br>".join(warnings) + + dialog = QtWidgets.QMessageBox(self) + dialog.setText(msg) + dialog.setIcon(QtWidgets.QMessageBox.Warning) + dialog.exec_() + + self.reset() + except Exception as exc: formatted_traceback = traceback.format_exception(*sys.exc_info()) dialog = QtWidgets.QMessageBox(self) diff --git a/openpype/tools/settings/settings/widgets/dict_mutable_widget.py b/openpype/tools/settings/settings/widgets/dict_mutable_widget.py index 3b5f15f519..ff4905c480 100644 --- a/openpype/tools/settings/settings/widgets/dict_mutable_widget.py +++ b/openpype/tools/settings/settings/widgets/dict_mutable_widget.py @@ -36,6 +36,7 @@ class ModifiableDictEmptyItem(QtWidgets.QWidget): super(ModifiableDictEmptyItem, self).__init__(parent) self.entity_widget = entity_widget self.collapsible_key = entity_widget.entity.collapsible_key + self.ignore_input_changes = entity_widget.ignore_input_changes self.is_duplicated = False self.key_is_valid = False @@ -101,9 +102,16 @@ class ModifiableDictEmptyItem(QtWidgets.QWidget): def _on_key_change(self): key = self.key_input.text() self.key_is_valid = KEY_REGEX.match(key) + + if self.ignore_input_changes: + return + self.is_duplicated = self.entity_widget.is_key_duplicated(key) key_input_state = "" - if self.is_duplicated or not self.key_is_valid: + # Collapsible key and empty key are not invalid + if self.collapsible_key and self.key_input.text() == "": + pass + elif self.is_duplicated or not self.key_is_valid: key_input_state = "invalid" elif key != "": key_input_state = "modified" @@ -352,6 +360,7 @@ class ModifiableDictItem(QtWidgets.QWidget): def set_label(self, label): if self.key_label_input and label is not None: self.key_label_input.setText(label) + self.update_key_label() def set_as_required(self, key): self.key_input.setText(key) @@ -383,6 +392,9 @@ class ModifiableDictItem(QtWidgets.QWidget): self.set_edit_mode(False) def _on_key_label_change(self): + if self.ignore_input_changes: + return + label = self.key_label_value() self.entity_widget.change_label(label, self) self.update_key_label() @@ -390,6 +402,10 @@ class ModifiableDictItem(QtWidgets.QWidget): def _on_key_change(self): key = self.key_value() self.key_is_valid = KEY_REGEX.match(key) + + if self.ignore_input_changes: + return + is_key_duplicated = self.entity_widget.validate_key_duplication( self.temp_key, key, self ) @@ -419,7 +435,7 @@ class ModifiableDictItem(QtWidgets.QWidget): self.wrapper_widget.label_widget.setText(label) def on_add_clicked(self): - widget = self.entity_widget.add_new_key(None, None, self) + widget = self.entity_widget.add_new_key(None, None) widget.key_input.setFocus(True) def on_edit_pressed(self): @@ -550,7 +566,9 @@ class DictMutableKeysWidget(BaseWidget): label = self.entity.label body_widget = None + content_left_margin = 0 if label: + content_left_margin = CHILD_OFFSET body_widget = ExpandingWidget(label, self) main_layout.addWidget(body_widget) label = None @@ -565,7 +583,9 @@ class DictMutableKeysWidget(BaseWidget): content_widget.setObjectName("ContentWidget") content_widget.setProperty("content_state", content_state) content_layout = QtWidgets.QVBoxLayout(content_widget) - content_layout.setContentsMargins(CHILD_OFFSET, 5, 0, bottom_margin) + content_layout.setContentsMargins( + content_left_margin, 5, 0, bottom_margin + ) if body_widget is None: main_layout.addWidget(content_widget) @@ -614,7 +634,7 @@ class DictMutableKeysWidget(BaseWidget): # TODO implement pass - def add_new_key(self, key, label=None, after_widget=None): + def add_new_key(self, key, 
label=None): uuid_key = None entity_key = key if not key: @@ -634,7 +654,7 @@ class DictMutableKeysWidget(BaseWidget): # Backup solution (for testing) if input_field is None: - input_field = self.add_widget_for_child(child_entity, after_widget) + input_field = self.add_widget_for_child(child_entity) if key: # Happens when created from collapsible key items where key @@ -712,29 +732,16 @@ class DictMutableKeysWidget(BaseWidget): return self.entity.set_child_label(entity, label) - def add_widget_for_child( - self, child_entity, after_widget=None, first=False - ): - if first: - new_widget_index = 0 - else: - new_widget_index = len(self.input_fields) - - if self.input_fields and not first: - if not after_widget: - after_widget = self.input_fields[-1] - - for idx in range(self.content_layout.count()): - item = self.content_layout.itemAt(idx) - if item.widget() is after_widget: - new_widget_index = idx + 1 - break - + def add_widget_for_child(self, child_entity): input_field = ModifiableDictItem( self.entity.collapsible_key, child_entity, self ) self.input_fields.append(input_field) + + new_widget_index = self.content_layout.count() - 1 + self.content_layout.insertWidget(new_widget_index, input_field) + return input_field def remove_row(self, widget): @@ -803,21 +810,15 @@ class DictMutableKeysWidget(BaseWidget): for key, child_entity in self.entity.items(): found = False - previous_input = None for input_field in self.input_fields: - if input_field.entity is not child_entity: - previous_input = input_field - else: + if input_field.entity is child_entity: found = True break if not found: changed = True - args = [previous_input] - if previous_input is None: - args.append(True) - _input_field = self.add_widget_for_child(child_entity, *args) + _input_field = self.add_widget_for_child(child_entity) _input_field.origin_key = key _input_field.set_key(key) if self.entity.collapsible_key: @@ -848,9 +849,8 @@ class DictMutableKeysWidget(BaseWidget): if keys_order: last_required = keys_order[-1] for key in self.entity.keys(): - if key in keys_order: - continue - keys_order.append(key) + if key not in keys_order: + keys_order.append(key) for key in keys_order: child_entity = self.entity[key] diff --git a/openpype/tools/settings/settings/widgets/multiselection_combobox.py b/openpype/tools/settings/settings/widgets/multiselection_combobox.py index da9cdd75cf..30ecb7b84b 100644 --- a/openpype/tools/settings/settings/widgets/multiselection_combobox.py +++ b/openpype/tools/settings/settings/widgets/multiselection_combobox.py @@ -262,7 +262,10 @@ class MultiSelectionComboBox(QtWidgets.QComboBox): self.lines[line] = [item] line += 1 else: - self.lines[line].append(item) + if line in self.lines: + self.lines[line].append(item) + else: + self.lines[line] = [item] left_x = left_x + width + self.item_spacing self.update() diff --git a/openpype/tools/standalonepublish/widgets/widget_components.py b/openpype/tools/standalonepublish/widgets/widget_components.py index e6682d97aa..2ac54af4e3 100644 --- a/openpype/tools/standalonepublish/widgets/widget_components.py +++ b/openpype/tools/standalonepublish/widgets/widget_components.py @@ -160,32 +160,11 @@ def set_context(project, asset, task): os.environ["AVALON_TASK"] = task io.Session["AVALON_TASK"] = task - io.install() - - av_project = io.find_one({'type': 'project'}) - av_asset = io.find_one({ - "type": 'asset', - "name": asset - }) - - parents = av_asset['data']['parents'] - hierarchy = '' - if parents and len(parents) > 0: - hierarchy = os.path.sep.join(parents) - - 
os.environ["AVALON_HIERARCHY"] = hierarchy - io.Session["AVALON_HIERARCHY"] = hierarchy - - os.environ["AVALON_PROJECTCODE"] = av_project['data'].get('code', '') - io.Session["AVALON_PROJECTCODE"] = av_project['data'].get('code', '') - io.Session["current_dir"] = os.path.normpath(os.getcwd()) os.environ["AVALON_APP"] = HOST_NAME io.Session["AVALON_APP"] = HOST_NAME - io.uninstall() - def cli_publish(data, publish_paths, gui=True): PUBLISH_SCRIPT_PATH = os.path.join( diff --git a/openpype/tools/tray/pype_info_widget.py b/openpype/tools/tray/pype_info_widget.py index dbff36eca7..bbb92f175f 100644 --- a/openpype/tools/tray/pype_info_widget.py +++ b/openpype/tools/tray/pype_info_widget.py @@ -111,6 +111,13 @@ class EnvironmentsView(QtWidgets.QTreeView): else: return super(EnvironmentsView, self).keyPressEvent(event) + def wheelEvent(self, event): + if not self.hasFocus(): + event.ignore() + return + return super(EnvironmentsView, self).wheelEvent(event) + + class ClickableWidget(QtWidgets.QWidget): clicked = QtCore.Signal() @@ -195,8 +202,6 @@ class CollapsibleWidget(QtWidgets.QWidget): class PypeInfoWidget(QtWidgets.QWidget): - not_applicable = "N/A" - def __init__(self, parent=None): super(PypeInfoWidget, self).__init__(parent) @@ -206,17 +211,21 @@ class PypeInfoWidget(QtWidgets.QWidget): self.setWindowIcon(icon) self.setWindowTitle("OpenPype info") + scroll_area = QtWidgets.QScrollArea(self) + info_widget = PypeInfoSubWidget(scroll_area) + + scroll_area.setWidget(info_widget) + scroll_area.setWidgetResizable(True) + main_layout = QtWidgets.QVBoxLayout(self) - main_layout.setAlignment(QtCore.Qt.AlignTop) - main_layout.addWidget(self._create_openpype_info_widget(), 0) - main_layout.addWidget(self._create_separator(), 0) - main_layout.addWidget(self._create_workstation_widget(), 0) - main_layout.addWidget(self._create_separator(), 0) - main_layout.addWidget(self._create_local_settings_widget(), 0) - main_layout.addWidget(self._create_separator(), 0) - main_layout.addWidget(self._create_environ_widget(), 1) + main_layout.addWidget(scroll_area, 1) main_layout.addWidget(self._create_btns_section(), 0) + self.resize(740, 540) + + self.scroll_area = scroll_area + self.info_widget = info_widget + def _create_btns_section(self): btns_widget = QtWidgets.QWidget(self) btns_layout = QtWidgets.QHBoxLayout(btns_widget) @@ -268,6 +277,24 @@ class PypeInfoWidget(QtWidgets.QWidget): mime_data ) + +class PypeInfoSubWidget(QtWidgets.QWidget): + not_applicable = "N/A" + + def __init__(self, parent=None): + super(PypeInfoSubWidget, self).__init__(parent) + + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.setContentsMargins(0, 0, 0, 0) + main_layout.setAlignment(QtCore.Qt.AlignTop) + main_layout.addWidget(self._create_openpype_info_widget(), 0) + main_layout.addWidget(self._create_separator(), 0) + main_layout.addWidget(self._create_workstation_widget(), 0) + main_layout.addWidget(self._create_separator(), 0) + main_layout.addWidget(self._create_local_settings_widget(), 0) + main_layout.addWidget(self._create_separator(), 0) + main_layout.addWidget(self._create_environ_widget(), 1) + def _create_separator(self): separator_widget = QtWidgets.QWidget(self) separator_widget.setStyleSheet("background: #222222;") @@ -322,6 +349,7 @@ class PypeInfoWidget(QtWidgets.QWidget): ) wokstation_info_widget.set_content_widget(info_widget) + wokstation_info_widget.toggle_content() return wokstation_info_widget @@ -342,6 +370,7 @@ class PypeInfoWidget(QtWidgets.QWidget): env_widget = CollapsibleWidget("Environments", 
self) env_view = EnvironmentsView(env_widget) + env_view.setMinimumHeight(300) env_widget.set_content_widget(env_view) @@ -363,7 +392,7 @@ class PypeInfoWidget(QtWidgets.QWidget): "version_value": "OpenPype version:", "executable": "OpenPype executable:", "pype_root": "OpenPype location:", - "mongo_url": "OpenPype Mongo URL:" + "mongo_url": "OpenPype Mongo URL:" } # Prepare keys order keys_order = ["version_value", "executable", "pype_root", "mongo_url"] diff --git a/openpype/vendor/python/python_2/dns/__init__.py b/openpype/vendor/python/python_2/dns/__init__.py new file mode 100644 index 0000000000..c1ce8e6061 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/__init__.py @@ -0,0 +1,56 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""dnspython DNS toolkit""" + +__all__ = [ + 'dnssec', + 'e164', + 'edns', + 'entropy', + 'exception', + 'flags', + 'hash', + 'inet', + 'ipv4', + 'ipv6', + 'message', + 'name', + 'namedict', + 'node', + 'opcode', + 'query', + 'rcode', + 'rdata', + 'rdataclass', + 'rdataset', + 'rdatatype', + 'renderer', + 'resolver', + 'reversename', + 'rrset', + 'set', + 'tokenizer', + 'tsig', + 'tsigkeyring', + 'ttl', + 'rdtypes', + 'update', + 'version', + 'wiredata', + 'zone', +] diff --git a/openpype/vendor/python/python_2/dns/_compat.py b/openpype/vendor/python/python_2/dns/_compat.py new file mode 100644 index 0000000000..ca0931c2b5 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/_compat.py @@ -0,0 +1,59 @@ +import sys +import decimal +from decimal import Context + +PY3 = sys.version_info[0] == 3 +PY2 = sys.version_info[0] == 2 + + +if PY3: + long = int + xrange = range +else: + long = long # pylint: disable=long-builtin + xrange = xrange # pylint: disable=xrange-builtin + +# unicode / binary types +if PY3: + text_type = str + binary_type = bytes + string_types = (str,) + unichr = chr + def maybe_decode(x): + return x.decode() + def maybe_encode(x): + return x.encode() + def maybe_chr(x): + return x + def maybe_ord(x): + return x +else: + text_type = unicode # pylint: disable=unicode-builtin, undefined-variable + binary_type = str + string_types = ( + basestring, # pylint: disable=basestring-builtin, undefined-variable + ) + unichr = unichr # pylint: disable=unichr-builtin + def maybe_decode(x): + return x + def maybe_encode(x): + return x + def maybe_chr(x): + return chr(x) + def maybe_ord(x): + return ord(x) + + +def round_py2_compat(what): + """ + Python 2 and Python 3 use different rounding strategies in round(). 
This + function ensures that results are python2/3 compatible and backward + compatible with previous py2 releases + :param what: float + :return: rounded long + """ + d = Context( + prec=len(str(long(what))), # round to integer with max precision + rounding=decimal.ROUND_HALF_UP + ).create_decimal(str(what)) # str(): python 2.6 compat + return long(d) diff --git a/openpype/vendor/python/python_2/dns/dnssec.py b/openpype/vendor/python/python_2/dns/dnssec.py new file mode 100644 index 0000000000..35da6b5a81 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/dnssec.py @@ -0,0 +1,519 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Common DNSSEC-related functions and constants.""" + +from io import BytesIO +import struct +import time + +import dns.exception +import dns.name +import dns.node +import dns.rdataset +import dns.rdata +import dns.rdatatype +import dns.rdataclass +from ._compat import string_types + + +class UnsupportedAlgorithm(dns.exception.DNSException): + """The DNSSEC algorithm is not supported.""" + + +class ValidationFailure(dns.exception.DNSException): + """The DNSSEC signature is invalid.""" + + +#: RSAMD5 +RSAMD5 = 1 +#: DH +DH = 2 +#: DSA +DSA = 3 +#: ECC +ECC = 4 +#: RSASHA1 +RSASHA1 = 5 +#: DSANSEC3SHA1 +DSANSEC3SHA1 = 6 +#: RSASHA1NSEC3SHA1 +RSASHA1NSEC3SHA1 = 7 +#: RSASHA256 +RSASHA256 = 8 +#: RSASHA512 +RSASHA512 = 10 +#: ECDSAP256SHA256 +ECDSAP256SHA256 = 13 +#: ECDSAP384SHA384 +ECDSAP384SHA384 = 14 +#: INDIRECT +INDIRECT = 252 +#: PRIVATEDNS +PRIVATEDNS = 253 +#: PRIVATEOID +PRIVATEOID = 254 + +_algorithm_by_text = { + 'RSAMD5': RSAMD5, + 'DH': DH, + 'DSA': DSA, + 'ECC': ECC, + 'RSASHA1': RSASHA1, + 'DSANSEC3SHA1': DSANSEC3SHA1, + 'RSASHA1NSEC3SHA1': RSASHA1NSEC3SHA1, + 'RSASHA256': RSASHA256, + 'RSASHA512': RSASHA512, + 'INDIRECT': INDIRECT, + 'ECDSAP256SHA256': ECDSAP256SHA256, + 'ECDSAP384SHA384': ECDSAP384SHA384, + 'PRIVATEDNS': PRIVATEDNS, + 'PRIVATEOID': PRIVATEOID, +} + +# We construct the inverse mapping programmatically to ensure that we +# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that +# would cause the mapping not to be true inverse. + +_algorithm_by_value = {y: x for x, y in _algorithm_by_text.items()} + + +def algorithm_from_text(text): + """Convert text into a DNSSEC algorithm value. + + Returns an ``int``. + """ + + value = _algorithm_by_text.get(text.upper()) + if value is None: + value = int(text) + return value + + +def algorithm_to_text(value): + """Convert a DNSSEC algorithm value to text + + Returns a ``str``. 
+ """ + + text = _algorithm_by_value.get(value) + if text is None: + text = str(value) + return text + + +def _to_rdata(record, origin): + s = BytesIO() + record.to_wire(s, origin=origin) + return s.getvalue() + + +def key_id(key, origin=None): + """Return the key id (a 16-bit number) for the specified key. + + Note the *origin* parameter of this function is historical and + is not needed. + + Returns an ``int`` between 0 and 65535. + """ + + rdata = _to_rdata(key, origin) + rdata = bytearray(rdata) + if key.algorithm == RSAMD5: + return (rdata[-3] << 8) + rdata[-2] + else: + total = 0 + for i in range(len(rdata) // 2): + total += (rdata[2 * i] << 8) + \ + rdata[2 * i + 1] + if len(rdata) % 2 != 0: + total += rdata[len(rdata) - 1] << 8 + total += ((total >> 16) & 0xffff) + return total & 0xffff + + +def make_ds(name, key, algorithm, origin=None): + """Create a DS record for a DNSSEC key. + + *name* is the owner name of the DS record. + + *key* is a ``dns.rdtypes.ANY.DNSKEY``. + + *algorithm* is a string describing which hash algorithm to use. The + currently supported hashes are "SHA1" and "SHA256". Case does not + matter for these strings. + + *origin* is a ``dns.name.Name`` and will be used as the origin + if *key* is a relative name. + + Returns a ``dns.rdtypes.ANY.DS``. + """ + + if algorithm.upper() == 'SHA1': + dsalg = 1 + hash = SHA1.new() + elif algorithm.upper() == 'SHA256': + dsalg = 2 + hash = SHA256.new() + else: + raise UnsupportedAlgorithm('unsupported algorithm "%s"' % algorithm) + + if isinstance(name, string_types): + name = dns.name.from_text(name, origin) + hash.update(name.canonicalize().to_wire()) + hash.update(_to_rdata(key, origin)) + digest = hash.digest() + + dsrdata = struct.pack("!HBB", key_id(key), key.algorithm, dsalg) + digest + return dns.rdata.from_wire(dns.rdataclass.IN, dns.rdatatype.DS, dsrdata, 0, + len(dsrdata)) + + +def _find_candidate_keys(keys, rrsig): + candidate_keys = [] + value = keys.get(rrsig.signer) + if value is None: + return None + if isinstance(value, dns.node.Node): + try: + rdataset = value.find_rdataset(dns.rdataclass.IN, + dns.rdatatype.DNSKEY) + except KeyError: + return None + else: + rdataset = value + for rdata in rdataset: + if rdata.algorithm == rrsig.algorithm and \ + key_id(rdata) == rrsig.key_tag: + candidate_keys.append(rdata) + return candidate_keys + + +def _is_rsa(algorithm): + return algorithm in (RSAMD5, RSASHA1, + RSASHA1NSEC3SHA1, RSASHA256, + RSASHA512) + + +def _is_dsa(algorithm): + return algorithm in (DSA, DSANSEC3SHA1) + + +def _is_ecdsa(algorithm): + return _have_ecdsa and (algorithm in (ECDSAP256SHA256, ECDSAP384SHA384)) + + +def _is_md5(algorithm): + return algorithm == RSAMD5 + + +def _is_sha1(algorithm): + return algorithm in (DSA, RSASHA1, + DSANSEC3SHA1, RSASHA1NSEC3SHA1) + + +def _is_sha256(algorithm): + return algorithm in (RSASHA256, ECDSAP256SHA256) + + +def _is_sha384(algorithm): + return algorithm == ECDSAP384SHA384 + + +def _is_sha512(algorithm): + return algorithm == RSASHA512 + + +def _make_hash(algorithm): + if _is_md5(algorithm): + return MD5.new() + if _is_sha1(algorithm): + return SHA1.new() + if _is_sha256(algorithm): + return SHA256.new() + if _is_sha384(algorithm): + return SHA384.new() + if _is_sha512(algorithm): + return SHA512.new() + raise ValidationFailure('unknown hash for algorithm %u' % algorithm) + + +def _make_algorithm_id(algorithm): + if _is_md5(algorithm): + oid = [0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x02, 0x05] + elif _is_sha1(algorithm): + oid = [0x2b, 0x0e, 0x03, 0x02, 0x1a] 
+ elif _is_sha256(algorithm): + oid = [0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x01] + elif _is_sha512(algorithm): + oid = [0x60, 0x86, 0x48, 0x01, 0x65, 0x03, 0x04, 0x02, 0x03] + else: + raise ValidationFailure('unknown algorithm %u' % algorithm) + olen = len(oid) + dlen = _make_hash(algorithm).digest_size + idbytes = [0x30] + [8 + olen + dlen] + \ + [0x30, olen + 4] + [0x06, olen] + oid + \ + [0x05, 0x00] + [0x04, dlen] + return struct.pack('!%dB' % len(idbytes), *idbytes) + + +def _validate_rrsig(rrset, rrsig, keys, origin=None, now=None): + """Validate an RRset against a single signature rdata + + The owner name of *rrsig* is assumed to be the same as the owner name + of *rrset*. + + *rrset* is the RRset to validate. It can be a ``dns.rrset.RRset`` or + a ``(dns.name.Name, dns.rdataset.Rdataset)`` tuple. + + *rrsig* is a ``dns.rdata.Rdata``, the signature to validate. + + *keys* is the key dictionary, used to find the DNSKEY associated with + a given name. The dictionary is keyed by a ``dns.name.Name``, and has + ``dns.node.Node`` or ``dns.rdataset.Rdataset`` values. + + *origin* is a ``dns.name.Name``, the origin to use for relative names. + + *now* is an ``int``, the time to use when validating the signatures, + in seconds since the UNIX epoch. The default is the current time. + """ + + if isinstance(origin, string_types): + origin = dns.name.from_text(origin, dns.name.root) + + candidate_keys = _find_candidate_keys(keys, rrsig) + if candidate_keys is None: + raise ValidationFailure('unknown key') + + for candidate_key in candidate_keys: + # For convenience, allow the rrset to be specified as a (name, + # rdataset) tuple as well as a proper rrset + if isinstance(rrset, tuple): + rrname = rrset[0] + rdataset = rrset[1] + else: + rrname = rrset.name + rdataset = rrset + + if now is None: + now = time.time() + if rrsig.expiration < now: + raise ValidationFailure('expired') + if rrsig.inception > now: + raise ValidationFailure('not yet valid') + + hash = _make_hash(rrsig.algorithm) + + if _is_rsa(rrsig.algorithm): + keyptr = candidate_key.key + (bytes_,) = struct.unpack('!B', keyptr[0:1]) + keyptr = keyptr[1:] + if bytes_ == 0: + (bytes_,) = struct.unpack('!H', keyptr[0:2]) + keyptr = keyptr[2:] + rsa_e = keyptr[0:bytes_] + rsa_n = keyptr[bytes_:] + try: + pubkey = CryptoRSA.construct( + (number.bytes_to_long(rsa_n), + number.bytes_to_long(rsa_e))) + except ValueError: + raise ValidationFailure('invalid public key') + sig = rrsig.signature + elif _is_dsa(rrsig.algorithm): + keyptr = candidate_key.key + (t,) = struct.unpack('!B', keyptr[0:1]) + keyptr = keyptr[1:] + octets = 64 + t * 8 + dsa_q = keyptr[0:20] + keyptr = keyptr[20:] + dsa_p = keyptr[0:octets] + keyptr = keyptr[octets:] + dsa_g = keyptr[0:octets] + keyptr = keyptr[octets:] + dsa_y = keyptr[0:octets] + pubkey = CryptoDSA.construct( + (number.bytes_to_long(dsa_y), + number.bytes_to_long(dsa_g), + number.bytes_to_long(dsa_p), + number.bytes_to_long(dsa_q))) + sig = rrsig.signature[1:] + elif _is_ecdsa(rrsig.algorithm): + # use ecdsa for NIST-384p -- not currently supported by pycryptodome + + keyptr = candidate_key.key + + if rrsig.algorithm == ECDSAP256SHA256: + curve = ecdsa.curves.NIST256p + key_len = 32 + elif rrsig.algorithm == ECDSAP384SHA384: + curve = ecdsa.curves.NIST384p + key_len = 48 + + x = number.bytes_to_long(keyptr[0:key_len]) + y = number.bytes_to_long(keyptr[key_len:key_len * 2]) + if not ecdsa.ecdsa.point_is_valid(curve.generator, x, y): + raise ValidationFailure('invalid ECDSA key') + point = 
ecdsa.ellipticcurve.Point(curve.curve, x, y, curve.order) + verifying_key = ecdsa.keys.VerifyingKey.from_public_point(point, + curve) + pubkey = ECKeyWrapper(verifying_key, key_len) + r = rrsig.signature[:key_len] + s = rrsig.signature[key_len:] + sig = ecdsa.ecdsa.Signature(number.bytes_to_long(r), + number.bytes_to_long(s)) + + else: + raise ValidationFailure('unknown algorithm %u' % rrsig.algorithm) + + hash.update(_to_rdata(rrsig, origin)[:18]) + hash.update(rrsig.signer.to_digestable(origin)) + + if rrsig.labels < len(rrname) - 1: + suffix = rrname.split(rrsig.labels + 1)[1] + rrname = dns.name.from_text('*', suffix) + rrnamebuf = rrname.to_digestable(origin) + rrfixed = struct.pack('!HHI', rdataset.rdtype, rdataset.rdclass, + rrsig.original_ttl) + rrlist = sorted(rdataset) + for rr in rrlist: + hash.update(rrnamebuf) + hash.update(rrfixed) + rrdata = rr.to_digestable(origin) + rrlen = struct.pack('!H', len(rrdata)) + hash.update(rrlen) + hash.update(rrdata) + + try: + if _is_rsa(rrsig.algorithm): + verifier = pkcs1_15.new(pubkey) + # will raise ValueError if verify fails: + verifier.verify(hash, sig) + elif _is_dsa(rrsig.algorithm): + verifier = DSS.new(pubkey, 'fips-186-3') + verifier.verify(hash, sig) + elif _is_ecdsa(rrsig.algorithm): + digest = hash.digest() + if not pubkey.verify(digest, sig): + raise ValueError + else: + # Raise here for code clarity; this won't actually ever happen + # since if the algorithm is really unknown we'd already have + # raised an exception above + raise ValidationFailure('unknown algorithm %u' % rrsig.algorithm) + # If we got here, we successfully verified so we can return without error + return + except ValueError: + # this happens on an individual validation failure + continue + # nothing verified -- raise failure: + raise ValidationFailure('verify failure') + + +def _validate(rrset, rrsigset, keys, origin=None, now=None): + """Validate an RRset. + + *rrset* is the RRset to validate. It can be a ``dns.rrset.RRset`` or + a ``(dns.name.Name, dns.rdataset.Rdataset)`` tuple. + + *rrsigset* is the signature RRset to be validated. It can be a + ``dns.rrset.RRset`` or a ``(dns.name.Name, dns.rdataset.Rdataset)`` tuple. + + *keys* is the key dictionary, used to find the DNSKEY associated with + a given name. The dictionary is keyed by a ``dns.name.Name``, and has + ``dns.node.Node`` or ``dns.rdataset.Rdataset`` values. + + *origin* is a ``dns.name.Name``, the origin to use for relative names. + + *now* is an ``int``, the time to use when validating the signatures, + in seconds since the UNIX epoch. The default is the current time. 
+ """ + + if isinstance(origin, string_types): + origin = dns.name.from_text(origin, dns.name.root) + + if isinstance(rrset, tuple): + rrname = rrset[0] + else: + rrname = rrset.name + + if isinstance(rrsigset, tuple): + rrsigname = rrsigset[0] + rrsigrdataset = rrsigset[1] + else: + rrsigname = rrsigset.name + rrsigrdataset = rrsigset + + rrname = rrname.choose_relativity(origin) + rrsigname = rrsigname.choose_relativity(origin) + if rrname != rrsigname: + raise ValidationFailure("owner names do not match") + + for rrsig in rrsigrdataset: + try: + _validate_rrsig(rrset, rrsig, keys, origin, now) + return + except ValidationFailure: + pass + raise ValidationFailure("no RRSIGs validated") + + +def _need_pycrypto(*args, **kwargs): + raise NotImplementedError("DNSSEC validation requires pycryptodome/pycryptodomex") + + +try: + try: + # test we're using pycryptodome, not pycrypto (which misses SHA1 for example) + from Crypto.Hash import MD5, SHA1, SHA256, SHA384, SHA512 + from Crypto.PublicKey import RSA as CryptoRSA, DSA as CryptoDSA + from Crypto.Signature import pkcs1_15, DSS + from Crypto.Util import number + except ImportError: + from Cryptodome.Hash import MD5, SHA1, SHA256, SHA384, SHA512 + from Cryptodome.PublicKey import RSA as CryptoRSA, DSA as CryptoDSA + from Cryptodome.Signature import pkcs1_15, DSS + from Cryptodome.Util import number +except ImportError: + validate = _need_pycrypto + validate_rrsig = _need_pycrypto + _have_pycrypto = False + _have_ecdsa = False +else: + validate = _validate + validate_rrsig = _validate_rrsig + _have_pycrypto = True + + try: + import ecdsa + import ecdsa.ecdsa + import ecdsa.ellipticcurve + import ecdsa.keys + except ImportError: + _have_ecdsa = False + else: + _have_ecdsa = True + + class ECKeyWrapper(object): + + def __init__(self, key, key_len): + self.key = key + self.key_len = key_len + + def verify(self, digest, sig): + diglong = number.bytes_to_long(digest) + return self.key.pubkey.verifies(diglong, sig) diff --git a/openpype/vendor/python/python_2/dns/e164.py b/openpype/vendor/python/python_2/dns/e164.py new file mode 100644 index 0000000000..758c47a784 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/e164.py @@ -0,0 +1,105 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS E.164 helpers.""" + +import dns.exception +import dns.name +import dns.resolver +from ._compat import string_types, maybe_decode + +#: The public E.164 domain. +public_enum_domain = dns.name.from_text('e164.arpa.') + + +def from_e164(text, origin=public_enum_domain): + """Convert an E.164 number in textual form into a Name object whose + value is the ENUM domain name for that number. + + Non-digits in the text are ignored, i.e. 
"16505551212", + "+1.650.555.1212" and "1 (650) 555-1212" are all the same. + + *text*, a ``text``, is an E.164 number in textual form. + + *origin*, a ``dns.name.Name``, the domain in which the number + should be constructed. The default is ``e164.arpa.``. + + Returns a ``dns.name.Name``. + """ + + parts = [d for d in text if d.isdigit()] + parts.reverse() + return dns.name.from_text('.'.join(parts), origin=origin) + + +def to_e164(name, origin=public_enum_domain, want_plus_prefix=True): + """Convert an ENUM domain name into an E.164 number. + + Note that dnspython does not have any information about preferred + number formats within national numbering plans, so all numbers are + emitted as a simple string of digits, prefixed by a '+' (unless + *want_plus_prefix* is ``False``). + + *name* is a ``dns.name.Name``, the ENUM domain name. + + *origin* is a ``dns.name.Name``, a domain containing the ENUM + domain name. The name is relativized to this domain before being + converted to text. If ``None``, no relativization is done. + + *want_plus_prefix* is a ``bool``. If True, add a '+' to the beginning of + the returned number. + + Returns a ``text``. + + """ + if origin is not None: + name = name.relativize(origin) + dlabels = [d for d in name.labels if d.isdigit() and len(d) == 1] + if len(dlabels) != len(name.labels): + raise dns.exception.SyntaxError('non-digit labels in ENUM domain name') + dlabels.reverse() + text = b''.join(dlabels) + if want_plus_prefix: + text = b'+' + text + return maybe_decode(text) + + +def query(number, domains, resolver=None): + """Look for NAPTR RRs for the specified number in the specified domains. + + e.g. lookup('16505551212', ['e164.dnspython.org.', 'e164.arpa.']) + + *number*, a ``text`` is the number to look for. + + *domains* is an iterable containing ``dns.name.Name`` values. + + *resolver*, a ``dns.resolver.Resolver``, is the resolver to use. If + ``None``, the default resolver is used. + """ + + if resolver is None: + resolver = dns.resolver.get_default_resolver() + e_nx = dns.resolver.NXDOMAIN() + for domain in domains: + if isinstance(domain, string_types): + domain = dns.name.from_text(domain) + qname = dns.e164.from_e164(number, domain) + try: + return resolver.query(qname, 'NAPTR') + except dns.resolver.NXDOMAIN as e: + e_nx += e + raise e_nx diff --git a/openpype/vendor/python/python_2/dns/edns.py b/openpype/vendor/python/python_2/dns/edns.py new file mode 100644 index 0000000000..5660f7bb7a --- /dev/null +++ b/openpype/vendor/python/python_2/dns/edns.py @@ -0,0 +1,269 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2009-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""EDNS Options""" + +from __future__ import absolute_import + +import math +import struct + +import dns.inet + +#: NSID +NSID = 3 +#: DAU +DAU = 5 +#: DHU +DHU = 6 +#: N3U +N3U = 7 +#: ECS (client-subnet) +ECS = 8 +#: EXPIRE +EXPIRE = 9 +#: COOKIE +COOKIE = 10 +#: KEEPALIVE +KEEPALIVE = 11 +#: PADDING +PADDING = 12 +#: CHAIN +CHAIN = 13 + +class Option(object): + + """Base class for all EDNS option types.""" + + def __init__(self, otype): + """Initialize an option. + + *otype*, an ``int``, is the option type. + """ + self.otype = otype + + def to_wire(self, file): + """Convert an option to wire format. + """ + raise NotImplementedError + + @classmethod + def from_wire(cls, otype, wire, current, olen): + """Build an EDNS option object from wire format. + + *otype*, an ``int``, is the option type. + + *wire*, a ``binary``, is the wire-format message. + + *current*, an ``int``, is the offset in *wire* of the beginning + of the rdata. + + *olen*, an ``int``, is the length of the wire-format option data + + Returns a ``dns.edns.Option``. + """ + + raise NotImplementedError + + def _cmp(self, other): + """Compare an EDNS option with another option of the same type. + + Returns < 0 if < *other*, 0 if == *other*, and > 0 if > *other*. + """ + raise NotImplementedError + + def __eq__(self, other): + if not isinstance(other, Option): + return False + if self.otype != other.otype: + return False + return self._cmp(other) == 0 + + def __ne__(self, other): + if not isinstance(other, Option): + return False + if self.otype != other.otype: + return False + return self._cmp(other) != 0 + + def __lt__(self, other): + if not isinstance(other, Option) or \ + self.otype != other.otype: + return NotImplemented + return self._cmp(other) < 0 + + def __le__(self, other): + if not isinstance(other, Option) or \ + self.otype != other.otype: + return NotImplemented + return self._cmp(other) <= 0 + + def __ge__(self, other): + if not isinstance(other, Option) or \ + self.otype != other.otype: + return NotImplemented + return self._cmp(other) >= 0 + + def __gt__(self, other): + if not isinstance(other, Option) or \ + self.otype != other.otype: + return NotImplemented + return self._cmp(other) > 0 + + +class GenericOption(Option): + + """Generic Option Class + + This class is used for EDNS option types for which we have no better + implementation. + """ + + def __init__(self, otype, data): + super(GenericOption, self).__init__(otype) + self.data = data + + def to_wire(self, file): + file.write(self.data) + + def to_text(self): + return "Generic %d" % self.otype + + @classmethod + def from_wire(cls, otype, wire, current, olen): + return cls(otype, wire[current: current + olen]) + + def _cmp(self, other): + if self.data == other.data: + return 0 + if self.data > other.data: + return 1 + return -1 + + +class ECSOption(Option): + """EDNS Client Subnet (ECS, RFC7871)""" + + def __init__(self, address, srclen=None, scopelen=0): + """*address*, a ``text``, is the client address information. + + *srclen*, an ``int``, the source prefix length, which is the + leftmost number of bits of the address to be used for the + lookup. The default is 24 for IPv4 and 56 for IPv6. + + *scopelen*, an ``int``, the scope prefix length. This value + must be 0 in queries, and should be set in responses. 
+ """ + + super(ECSOption, self).__init__(ECS) + af = dns.inet.af_for_address(address) + + if af == dns.inet.AF_INET6: + self.family = 2 + if srclen is None: + srclen = 56 + elif af == dns.inet.AF_INET: + self.family = 1 + if srclen is None: + srclen = 24 + else: + raise ValueError('Bad ip family') + + self.address = address + self.srclen = srclen + self.scopelen = scopelen + + addrdata = dns.inet.inet_pton(af, address) + nbytes = int(math.ceil(srclen/8.0)) + + # Truncate to srclen and pad to the end of the last octet needed + # See RFC section 6 + self.addrdata = addrdata[:nbytes] + nbits = srclen % 8 + if nbits != 0: + last = struct.pack('B', ord(self.addrdata[-1:]) & (0xff << nbits)) + self.addrdata = self.addrdata[:-1] + last + + def to_text(self): + return "ECS {}/{} scope/{}".format(self.address, self.srclen, + self.scopelen) + + def to_wire(self, file): + file.write(struct.pack('!H', self.family)) + file.write(struct.pack('!BB', self.srclen, self.scopelen)) + file.write(self.addrdata) + + @classmethod + def from_wire(cls, otype, wire, cur, olen): + family, src, scope = struct.unpack('!HBB', wire[cur:cur+4]) + cur += 4 + + addrlen = int(math.ceil(src/8.0)) + + if family == 1: + af = dns.inet.AF_INET + pad = 4 - addrlen + elif family == 2: + af = dns.inet.AF_INET6 + pad = 16 - addrlen + else: + raise ValueError('unsupported family') + + addr = dns.inet.inet_ntop(af, wire[cur:cur+addrlen] + b'\x00' * pad) + return cls(addr, src, scope) + + def _cmp(self, other): + if self.addrdata == other.addrdata: + return 0 + if self.addrdata > other.addrdata: + return 1 + return -1 + +_type_to_class = { + ECS: ECSOption +} + +def get_option_class(otype): + """Return the class for the specified option type. + + The GenericOption class is used if a more specific class is not + known. + """ + + cls = _type_to_class.get(otype) + if cls is None: + cls = GenericOption + return cls + + +def option_from_wire(otype, wire, current, olen): + """Build an EDNS option object from wire format. + + *otype*, an ``int``, is the option type. + + *wire*, a ``binary``, is the wire-format message. + + *current*, an ``int``, is the offset in *wire* of the beginning + of the rdata. + + *olen*, an ``int``, is the length of the wire-format option data + + Returns an instance of a subclass of ``dns.edns.Option``. + """ + + cls = get_option_class(otype) + return cls.from_wire(otype, wire, current, olen) diff --git a/openpype/vendor/python/python_2/dns/entropy.py b/openpype/vendor/python/python_2/dns/entropy.py new file mode 100644 index 0000000000..00c6a4b389 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/entropy.py @@ -0,0 +1,148 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2009-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import os +import random +import time +from ._compat import long, binary_type +try: + import threading as _threading +except ImportError: + import dummy_threading as _threading + + +class EntropyPool(object): + + # This is an entropy pool for Python implementations that do not + # have a working SystemRandom. I'm not sure there are any, but + # leaving this code doesn't hurt anything as the library code + # is used if present. + + def __init__(self, seed=None): + self.pool_index = 0 + self.digest = None + self.next_byte = 0 + self.lock = _threading.Lock() + try: + import hashlib + self.hash = hashlib.sha1() + self.hash_len = 20 + except ImportError: + try: + import sha + self.hash = sha.new() + self.hash_len = 20 + except ImportError: + import md5 # pylint: disable=import-error + self.hash = md5.new() + self.hash_len = 16 + self.pool = bytearray(b'\0' * self.hash_len) + if seed is not None: + self.stir(bytearray(seed)) + self.seeded = True + self.seed_pid = os.getpid() + else: + self.seeded = False + self.seed_pid = 0 + + def stir(self, entropy, already_locked=False): + if not already_locked: + self.lock.acquire() + try: + for c in entropy: + if self.pool_index == self.hash_len: + self.pool_index = 0 + b = c & 0xff + self.pool[self.pool_index] ^= b + self.pool_index += 1 + finally: + if not already_locked: + self.lock.release() + + def _maybe_seed(self): + if not self.seeded or self.seed_pid != os.getpid(): + try: + seed = os.urandom(16) + except Exception: + try: + r = open('/dev/urandom', 'rb', 0) + try: + seed = r.read(16) + finally: + r.close() + except Exception: + seed = str(time.time()) + self.seeded = True + self.seed_pid = os.getpid() + self.digest = None + seed = bytearray(seed) + self.stir(seed, True) + + def random_8(self): + self.lock.acquire() + try: + self._maybe_seed() + if self.digest is None or self.next_byte == self.hash_len: + self.hash.update(binary_type(self.pool)) + self.digest = bytearray(self.hash.digest()) + self.stir(self.digest, True) + self.next_byte = 0 + value = self.digest[self.next_byte] + self.next_byte += 1 + finally: + self.lock.release() + return value + + def random_16(self): + return self.random_8() * 256 + self.random_8() + + def random_32(self): + return self.random_16() * 65536 + self.random_16() + + def random_between(self, first, last): + size = last - first + 1 + if size > long(4294967296): + raise ValueError('too big') + if size > 65536: + rand = self.random_32 + max = long(4294967295) + elif size > 256: + rand = self.random_16 + max = 65535 + else: + rand = self.random_8 + max = 255 + return first + size * rand() // (max + 1) + +pool = EntropyPool() + +try: + system_random = random.SystemRandom() +except Exception: + system_random = None + +def random_16(): + if system_random is not None: + return system_random.randrange(0, 65536) + else: + return pool.random_16() + +def between(first, last): + if system_random is not None: + return system_random.randrange(first, last + 1) + else: + return pool.random_between(first, last) diff --git a/openpype/vendor/python/python_2/dns/exception.py b/openpype/vendor/python/python_2/dns/exception.py new file mode 100644 index 0000000000..71ff04f148 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/exception.py @@ -0,0 +1,128 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Common DNS Exceptions. + +Dnspython modules may also define their own exceptions, which will +always be subclasses of ``DNSException``. +""" + +class DNSException(Exception): + """Abstract base class shared by all dnspython exceptions. + + It supports two basic modes of operation: + + a) Old/compatible mode is used if ``__init__`` was called with + empty *kwargs*. In compatible mode all *args* are passed + to the standard Python Exception class as before and all *args* are + printed by the standard ``__str__`` implementation. Class variable + ``msg`` (or doc string if ``msg`` is ``None``) is returned from ``str()`` + if *args* is empty. + + b) New/parametrized mode is used if ``__init__`` was called with + non-empty *kwargs*. + In the new mode *args* must be empty and all kwargs must match + those set in class variable ``supp_kwargs``. All kwargs are stored inside + ``self.kwargs`` and used in a new ``__str__`` implementation to construct + a formatted message based on the ``fmt`` class variable, a ``string``. + + In the simplest case it is enough to override the ``supp_kwargs`` + and ``fmt`` class variables to get nice parametrized messages. + """ + + msg = None # non-parametrized message + supp_kwargs = set() # accepted parameters for _fmt_kwargs (sanity check) + fmt = None # message parametrized with results from _fmt_kwargs + + def __init__(self, *args, **kwargs): + self._check_params(*args, **kwargs) + if kwargs: + self.kwargs = self._check_kwargs(**kwargs) + self.msg = str(self) + else: + self.kwargs = dict() # defined but empty for old mode exceptions + if self.msg is None: + # doc string is better implicit message than empty string + self.msg = self.__doc__ + if args: + super(DNSException, self).__init__(*args) + else: + super(DNSException, self).__init__(self.msg) + + def _check_params(self, *args, **kwargs): + """Old exceptions supported only args and not kwargs. + + For sanity we do not allow to mix old and new behavior.""" + if args or kwargs: + assert bool(args) != bool(kwargs), \ + 'keyword arguments are mutually exclusive with positional args' + + def _check_kwargs(self, **kwargs): + if kwargs: + assert set(kwargs.keys()) == self.supp_kwargs, \ + 'following set of keyword args is required: %s' % ( + self.supp_kwargs) + return kwargs + + def _fmt_kwargs(self, **kwargs): + """Format kwargs before printing them. + + Resulting dictionary has to have keys necessary for str.format call + on fmt class variable. 
+ """ + fmtargs = {} + for kw, data in kwargs.items(): + if isinstance(data, (list, set)): + # convert list of to list of str() + fmtargs[kw] = list(map(str, data)) + if len(fmtargs[kw]) == 1: + # remove list brackets [] from single-item lists + fmtargs[kw] = fmtargs[kw].pop() + else: + fmtargs[kw] = data + return fmtargs + + def __str__(self): + if self.kwargs and self.fmt: + # provide custom message constructed from keyword arguments + fmtargs = self._fmt_kwargs(**self.kwargs) + return self.fmt.format(**fmtargs) + else: + # print *args directly in the same way as old DNSException + return super(DNSException, self).__str__() + + +class FormError(DNSException): + """DNS message is malformed.""" + + +class SyntaxError(DNSException): + """Text input is malformed.""" + + +class UnexpectedEnd(SyntaxError): + """Text input ended unexpectedly.""" + + +class TooBig(DNSException): + """The DNS message is too big.""" + + +class Timeout(DNSException): + """The DNS operation timed out.""" + supp_kwargs = {'timeout'} + fmt = "The DNS operation timed out after {timeout} seconds" diff --git a/openpype/vendor/python/python_2/dns/flags.py b/openpype/vendor/python/python_2/dns/flags.py new file mode 100644 index 0000000000..0119dec71f --- /dev/null +++ b/openpype/vendor/python/python_2/dns/flags.py @@ -0,0 +1,130 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Message Flags.""" + +# Standard DNS flags + +#: Query Response +QR = 0x8000 +#: Authoritative Answer +AA = 0x0400 +#: Truncated Response +TC = 0x0200 +#: Recursion Desired +RD = 0x0100 +#: Recursion Available +RA = 0x0080 +#: Authentic Data +AD = 0x0020 +#: Checking Disabled +CD = 0x0010 + +# EDNS flags + +#: DNSSEC answer OK +DO = 0x8000 + +_by_text = { + 'QR': QR, + 'AA': AA, + 'TC': TC, + 'RD': RD, + 'RA': RA, + 'AD': AD, + 'CD': CD +} + +_edns_by_text = { + 'DO': DO +} + + +# We construct the inverse mappings programmatically to ensure that we +# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that +# would cause the mappings not to be true inverses. 
+ +_by_value = {y: x for x, y in _by_text.items()} + +_edns_by_value = {y: x for x, y in _edns_by_text.items()} + + +def _order_flags(table): + order = list(table.items()) + order.sort() + order.reverse() + return order + +_flags_order = _order_flags(_by_value) + +_edns_flags_order = _order_flags(_edns_by_value) + + +def _from_text(text, table): + flags = 0 + tokens = text.split() + for t in tokens: + flags = flags | table[t.upper()] + return flags + + +def _to_text(flags, table, order): + text_flags = [] + for k, v in order: + if flags & k != 0: + text_flags.append(v) + return ' '.join(text_flags) + + +def from_text(text): + """Convert a space-separated list of flag text values into a flags + value. + + Returns an ``int`` + """ + + return _from_text(text, _by_text) + + +def to_text(flags): + """Convert a flags value into a space-separated list of flag text + values. + + Returns a ``text``. + """ + + return _to_text(flags, _by_value, _flags_order) + + +def edns_from_text(text): + """Convert a space-separated list of EDNS flag text values into a EDNS + flags value. + + Returns an ``int`` + """ + + return _from_text(text, _edns_by_text) + + +def edns_to_text(flags): + """Convert an EDNS flags value into a space-separated list of EDNS flag + text values. + + Returns a ``text``. + """ + + return _to_text(flags, _edns_by_value, _edns_flags_order) diff --git a/openpype/vendor/python/python_2/dns/grange.py b/openpype/vendor/python/python_2/dns/grange.py new file mode 100644 index 0000000000..ffe8be7c46 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/grange.py @@ -0,0 +1,69 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2012-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS GENERATE range conversion.""" + +import dns + +def from_text(text): + """Convert the text form of a range in a ``$GENERATE`` statement to an + integer. + + *text*, a ``str``, the textual range in ``$GENERATE`` form. + + Returns a tuple of three ``int`` values ``(start, stop, step)``. + """ + + # TODO, figure out the bounds on start, stop and step. + step = 1 + cur = '' + state = 0 + # state 0 1 2 3 4 + # x - y / z + + if text and text[0] == '-': + raise dns.exception.SyntaxError("Start cannot be a negative number") + + for c in text: + if c == '-' and state == 0: + start = int(cur) + cur = '' + state = 2 + elif c == '/': + stop = int(cur) + cur = '' + state = 4 + elif c.isdigit(): + cur += c + else: + raise dns.exception.SyntaxError("Could not parse %s" % (c)) + + if state in (1, 3): + raise dns.exception.SyntaxError() + + if state == 2: + stop = int(cur) + + if state == 4: + step = int(cur) + + assert step >= 1 + assert start >= 0 + assert start <= stop + # TODO, can start == stop? 
+ + return (start, stop, step) diff --git a/openpype/vendor/python/python_2/dns/hash.py b/openpype/vendor/python/python_2/dns/hash.py new file mode 100644 index 0000000000..1713e62894 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/hash.py @@ -0,0 +1,37 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Hashing backwards compatibility wrapper""" + +import hashlib +import warnings + +warnings.warn( + "dns.hash module will be removed in future versions. Please use hashlib instead.", + DeprecationWarning) + +hashes = {} +hashes['MD5'] = hashlib.md5 +hashes['SHA1'] = hashlib.sha1 +hashes['SHA224'] = hashlib.sha224 +hashes['SHA256'] = hashlib.sha256 +hashes['SHA384'] = hashlib.sha384 +hashes['SHA512'] = hashlib.sha512 + + +def get(algorithm): + return hashes[algorithm.upper()] diff --git a/openpype/vendor/python/python_2/dns/inet.py b/openpype/vendor/python/python_2/dns/inet.py new file mode 100644 index 0000000000..c8d7c1b404 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/inet.py @@ -0,0 +1,124 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Generic Internet address helper functions.""" + +import socket + +import dns.ipv4 +import dns.ipv6 + +from ._compat import maybe_ord + +# We assume that AF_INET is always defined. + +AF_INET = socket.AF_INET + +# AF_INET6 might not be defined in the socket module, but we need it. +# We'll try to use the socket module's value, and if it doesn't work, +# we'll use our own value. + +try: + AF_INET6 = socket.AF_INET6 +except AttributeError: + AF_INET6 = 9999 + + +def inet_pton(family, text): + """Convert the textual form of a network address into its binary form. + + *family* is an ``int``, the address family. + + *text* is a ``text``, the textual address. + + Raises ``NotImplementedError`` if the address family specified is not + implemented. + + Returns a ``binary``. 
+ """ + + if family == AF_INET: + return dns.ipv4.inet_aton(text) + elif family == AF_INET6: + return dns.ipv6.inet_aton(text) + else: + raise NotImplementedError + + +def inet_ntop(family, address): + """Convert the binary form of a network address into its textual form. + + *family* is an ``int``, the address family. + + *address* is a ``binary``, the network address in binary form. + + Raises ``NotImplementedError`` if the address family specified is not + implemented. + + Returns a ``text``. + """ + + if family == AF_INET: + return dns.ipv4.inet_ntoa(address) + elif family == AF_INET6: + return dns.ipv6.inet_ntoa(address) + else: + raise NotImplementedError + + +def af_for_address(text): + """Determine the address family of a textual-form network address. + + *text*, a ``text``, the textual address. + + Raises ``ValueError`` if the address family cannot be determined + from the input. + + Returns an ``int``. + """ + + try: + dns.ipv4.inet_aton(text) + return AF_INET + except Exception: + try: + dns.ipv6.inet_aton(text) + return AF_INET6 + except: + raise ValueError + + +def is_multicast(text): + """Is the textual-form network address a multicast address? + + *text*, a ``text``, the textual address. + + Raises ``ValueError`` if the address family cannot be determined + from the input. + + Returns a ``bool``. + """ + + try: + first = maybe_ord(dns.ipv4.inet_aton(text)[0]) + return first >= 224 and first <= 239 + except Exception: + try: + first = maybe_ord(dns.ipv6.inet_aton(text)[0]) + return first == 255 + except Exception: + raise ValueError diff --git a/openpype/vendor/python/python_2/dns/ipv4.py b/openpype/vendor/python/python_2/dns/ipv4.py new file mode 100644 index 0000000000..8fc4f7dcfd --- /dev/null +++ b/openpype/vendor/python/python_2/dns/ipv4.py @@ -0,0 +1,63 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""IPv4 helper functions.""" + +import struct + +import dns.exception +from ._compat import binary_type + +def inet_ntoa(address): + """Convert an IPv4 address in binary form to text form. + + *address*, a ``binary``, the IPv4 address in binary form. + + Returns a ``text``. + """ + + if len(address) != 4: + raise dns.exception.SyntaxError + if not isinstance(address, bytearray): + address = bytearray(address) + return ('%u.%u.%u.%u' % (address[0], address[1], + address[2], address[3])) + +def inet_aton(text): + """Convert an IPv4 address in text form to binary form. + + *text*, a ``text``, the IPv4 address in textual form. + + Returns a ``binary``. 
+ """ + + if not isinstance(text, binary_type): + text = text.encode() + parts = text.split(b'.') + if len(parts) != 4: + raise dns.exception.SyntaxError + for part in parts: + if not part.isdigit(): + raise dns.exception.SyntaxError + if len(part) > 1 and part[0] == '0': + # No leading zeros + raise dns.exception.SyntaxError + try: + bytes = [int(part) for part in parts] + return struct.pack('BBBB', *bytes) + except: + raise dns.exception.SyntaxError diff --git a/openpype/vendor/python/python_2/dns/ipv6.py b/openpype/vendor/python/python_2/dns/ipv6.py new file mode 100644 index 0000000000..128e56c8f1 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/ipv6.py @@ -0,0 +1,181 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""IPv6 helper functions.""" + +import re +import binascii + +import dns.exception +import dns.ipv4 +from ._compat import xrange, binary_type, maybe_decode + +_leading_zero = re.compile(r'0+([0-9a-f]+)') + +def inet_ntoa(address): + """Convert an IPv6 address in binary form to text form. + + *address*, a ``binary``, the IPv6 address in binary form. + + Raises ``ValueError`` if the address isn't 16 bytes long. + Returns a ``text``. + """ + + if len(address) != 16: + raise ValueError("IPv6 addresses are 16 bytes long") + hex = binascii.hexlify(address) + chunks = [] + i = 0 + l = len(hex) + while i < l: + chunk = maybe_decode(hex[i : i + 4]) + # strip leading zeros. 
we do this with an re instead of + # with lstrip() because lstrip() didn't support chars until + # python 2.2.2 + m = _leading_zero.match(chunk) + if not m is None: + chunk = m.group(1) + chunks.append(chunk) + i += 4 + # + # Compress the longest subsequence of 0-value chunks to :: + # + best_start = 0 + best_len = 0 + start = -1 + last_was_zero = False + for i in xrange(8): + if chunks[i] != '0': + if last_was_zero: + end = i + current_len = end - start + if current_len > best_len: + best_start = start + best_len = current_len + last_was_zero = False + elif not last_was_zero: + start = i + last_was_zero = True + if last_was_zero: + end = 8 + current_len = end - start + if current_len > best_len: + best_start = start + best_len = current_len + if best_len > 1: + if best_start == 0 and \ + (best_len == 6 or + best_len == 5 and chunks[5] == 'ffff'): + # We have an embedded IPv4 address + if best_len == 6: + prefix = '::' + else: + prefix = '::ffff:' + hex = prefix + dns.ipv4.inet_ntoa(address[12:]) + else: + hex = ':'.join(chunks[:best_start]) + '::' + \ + ':'.join(chunks[best_start + best_len:]) + else: + hex = ':'.join(chunks) + return hex + +_v4_ending = re.compile(br'(.*):(\d+\.\d+\.\d+\.\d+)$') +_colon_colon_start = re.compile(br'::.*') +_colon_colon_end = re.compile(br'.*::$') + +def inet_aton(text): + """Convert an IPv6 address in text form to binary form. + + *text*, a ``text``, the IPv6 address in textual form. + + Returns a ``binary``. + """ + + # + # Our aim here is not something fast; we just want something that works. + # + if not isinstance(text, binary_type): + text = text.encode() + + if text == b'::': + text = b'0::' + # + # Get rid of the icky dot-quad syntax if we have it. + # + m = _v4_ending.match(text) + if not m is None: + b = bytearray(dns.ipv4.inet_aton(m.group(2))) + text = (u"{}:{:02x}{:02x}:{:02x}{:02x}".format(m.group(1).decode(), + b[0], b[1], b[2], + b[3])).encode() + # + # Try to turn '::' into ':'; if no match try to + # turn '::' into ':' + # + m = _colon_colon_start.match(text) + if not m is None: + text = text[1:] + else: + m = _colon_colon_end.match(text) + if not m is None: + text = text[:-1] + # + # Now canonicalize into 8 chunks of 4 hex digits each + # + chunks = text.split(b':') + l = len(chunks) + if l > 8: + raise dns.exception.SyntaxError + seen_empty = False + canonical = [] + for c in chunks: + if c == b'': + if seen_empty: + raise dns.exception.SyntaxError + seen_empty = True + for i in xrange(0, 8 - l + 1): + canonical.append(b'0000') + else: + lc = len(c) + if lc > 4: + raise dns.exception.SyntaxError + if lc != 4: + c = (b'0' * (4 - lc)) + c + canonical.append(c) + if l < 8 and not seen_empty: + raise dns.exception.SyntaxError + text = b''.join(canonical) + + # + # Finally we can go to binary. + # + try: + return binascii.unhexlify(text) + except (binascii.Error, TypeError): + raise dns.exception.SyntaxError + +_mapped_prefix = b'\x00' * 10 + b'\xff\xff' + +def is_mapped(address): + """Is the specified address a mapped IPv4 address? + + *address*, a ``binary`` is an IPv6 address in binary form. + + Returns a ``bool``. + """ + + return address.startswith(_mapped_prefix) diff --git a/openpype/vendor/python/python_2/dns/message.py b/openpype/vendor/python/python_2/dns/message.py new file mode 100644 index 0000000000..9d2b2f43c9 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/message.py @@ -0,0 +1,1175 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Messages""" + +from __future__ import absolute_import + +from io import StringIO +import struct +import time + +import dns.edns +import dns.exception +import dns.flags +import dns.name +import dns.opcode +import dns.entropy +import dns.rcode +import dns.rdata +import dns.rdataclass +import dns.rdatatype +import dns.rrset +import dns.renderer +import dns.tsig +import dns.wiredata + +from ._compat import long, xrange, string_types + + +class ShortHeader(dns.exception.FormError): + """The DNS packet passed to from_wire() is too short.""" + + +class TrailingJunk(dns.exception.FormError): + """The DNS packet passed to from_wire() has extra junk at the end of it.""" + + +class UnknownHeaderField(dns.exception.DNSException): + """The header field name was not recognized when converting from text + into a message.""" + + +class BadEDNS(dns.exception.FormError): + """An OPT record occurred somewhere other than the start of + the additional data section.""" + + +class BadTSIG(dns.exception.FormError): + """A TSIG record occurred somewhere other than the end of + the additional data section.""" + + +class UnknownTSIGKey(dns.exception.DNSException): + """A TSIG with an unknown key was received.""" + + +#: The question section number +QUESTION = 0 + +#: The answer section number +ANSWER = 1 + +#: The authority section number +AUTHORITY = 2 + +#: The additional section number +ADDITIONAL = 3 + +class Message(object): + """A DNS message.""" + + def __init__(self, id=None): + if id is None: + self.id = dns.entropy.random_16() + else: + self.id = id + self.flags = 0 + self.question = [] + self.answer = [] + self.authority = [] + self.additional = [] + self.edns = -1 + self.ednsflags = 0 + self.payload = 0 + self.options = [] + self.request_payload = 0 + self.keyring = None + self.keyname = None + self.keyalgorithm = dns.tsig.default_algorithm + self.request_mac = b'' + self.other_data = b'' + self.tsig_error = 0 + self.fudge = 300 + self.original_id = self.id + self.mac = b'' + self.xfr = False + self.origin = None + self.tsig_ctx = None + self.had_tsig = False + self.multi = False + self.first = True + self.index = {} + + def __repr__(self): + return '' + + def __str__(self): + return self.to_text() + + def to_text(self, origin=None, relativize=True, **kw): + """Convert the message to text. + + The *origin*, *relativize*, and any other keyword + arguments are passed to the RRset ``to_wire()`` method. + + Returns a ``text``. 
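For illustration, a freshly constructed Message renders through to_text() roughly like this (a sketch; the id is random unless passed to the constructor above):

import dns.message

msg = dns.message.Message(id=1234)
print(msg.to_text())
# id 1234
# opcode QUERY
# rcode NOERROR
# flags
# ;QUESTION
# ;ANSWER
# ;AUTHORITY
# ;ADDITIONAL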
+ """ + + s = StringIO() + s.write(u'id %d\n' % self.id) + s.write(u'opcode %s\n' % + dns.opcode.to_text(dns.opcode.from_flags(self.flags))) + rc = dns.rcode.from_flags(self.flags, self.ednsflags) + s.write(u'rcode %s\n' % dns.rcode.to_text(rc)) + s.write(u'flags %s\n' % dns.flags.to_text(self.flags)) + if self.edns >= 0: + s.write(u'edns %s\n' % self.edns) + if self.ednsflags != 0: + s.write(u'eflags %s\n' % + dns.flags.edns_to_text(self.ednsflags)) + s.write(u'payload %d\n' % self.payload) + for opt in self.options: + s.write(u'option %s\n' % opt.to_text()) + is_update = dns.opcode.is_update(self.flags) + if is_update: + s.write(u';ZONE\n') + else: + s.write(u';QUESTION\n') + for rrset in self.question: + s.write(rrset.to_text(origin, relativize, **kw)) + s.write(u'\n') + if is_update: + s.write(u';PREREQ\n') + else: + s.write(u';ANSWER\n') + for rrset in self.answer: + s.write(rrset.to_text(origin, relativize, **kw)) + s.write(u'\n') + if is_update: + s.write(u';UPDATE\n') + else: + s.write(u';AUTHORITY\n') + for rrset in self.authority: + s.write(rrset.to_text(origin, relativize, **kw)) + s.write(u'\n') + s.write(u';ADDITIONAL\n') + for rrset in self.additional: + s.write(rrset.to_text(origin, relativize, **kw)) + s.write(u'\n') + # + # We strip off the final \n so the caller can print the result without + # doing weird things to get around eccentricities in Python print + # formatting + # + return s.getvalue()[:-1] + + def __eq__(self, other): + """Two messages are equal if they have the same content in the + header, question, answer, and authority sections. + + Returns a ``bool``. + """ + + if not isinstance(other, Message): + return False + if self.id != other.id: + return False + if self.flags != other.flags: + return False + for n in self.question: + if n not in other.question: + return False + for n in other.question: + if n not in self.question: + return False + for n in self.answer: + if n not in other.answer: + return False + for n in other.answer: + if n not in self.answer: + return False + for n in self.authority: + if n not in other.authority: + return False + for n in other.authority: + if n not in self.authority: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + + def is_response(self, other): + """Is this message a response to *other*? + + Returns a ``bool``. + """ + + if other.flags & dns.flags.QR == 0 or \ + self.id != other.id or \ + dns.opcode.from_flags(self.flags) != \ + dns.opcode.from_flags(other.flags): + return False + if dns.rcode.from_flags(other.flags, other.ednsflags) != \ + dns.rcode.NOERROR: + return True + if dns.opcode.is_update(self.flags): + return True + for n in self.question: + if n not in other.question: + return False + for n in other.question: + if n not in self.question: + return False + return True + + def section_number(self, section): + """Return the "section number" of the specified section for use + in indexing. The question section is 0, the answer section is 1, + the authority section is 2, and the additional section is 3. + + *section* is one of the section attributes of this message. + + Raises ``ValueError`` if the section isn't known. + + Returns an ``int``. 
+ """ + + if section is self.question: + return QUESTION + elif section is self.answer: + return ANSWER + elif section is self.authority: + return AUTHORITY + elif section is self.additional: + return ADDITIONAL + else: + raise ValueError('unknown section') + + def section_from_number(self, number): + """Return the "section number" of the specified section for use + in indexing. The question section is 0, the answer section is 1, + the authority section is 2, and the additional section is 3. + + *section* is one of the section attributes of this message. + + Raises ``ValueError`` if the section isn't known. + + Returns an ``int``. + """ + + if number == QUESTION: + return self.question + elif number == ANSWER: + return self.answer + elif number == AUTHORITY: + return self.authority + elif number == ADDITIONAL: + return self.additional + else: + raise ValueError('unknown section') + + def find_rrset(self, section, name, rdclass, rdtype, + covers=dns.rdatatype.NONE, deleting=None, create=False, + force_unique=False): + """Find the RRset with the given attributes in the specified section. + + *section*, an ``int`` section number, or one of the section + attributes of this message. This specifies the + the section of the message to search. For example:: + + my_message.find_rrset(my_message.answer, name, rdclass, rdtype) + my_message.find_rrset(dns.message.ANSWER, name, rdclass, rdtype) + + *name*, a ``dns.name.Name``, the name of the RRset. + + *rdclass*, an ``int``, the class of the RRset. + + *rdtype*, an ``int``, the type of the RRset. + + *covers*, an ``int`` or ``None``, the covers value of the RRset. + The default is ``None``. + + *deleting*, an ``int`` or ``None``, the deleting value of the RRset. + The default is ``None``. + + *create*, a ``bool``. If ``True``, create the RRset if it is not found. + The created RRset is appended to *section*. + + *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, + create a new RRset regardless of whether a matching RRset exists + already. The default is ``False``. This is useful when creating + DDNS Update messages, as order matters for them. + + Raises ``KeyError`` if the RRset was not found and create was + ``False``. + + Returns a ``dns.rrset.RRset object``. + """ + + if isinstance(section, int): + section_number = section + section = self.section_from_number(section_number) + else: + section_number = self.section_number(section) + key = (section_number, name, rdclass, rdtype, covers, deleting) + if not force_unique: + if self.index is not None: + rrset = self.index.get(key) + if rrset is not None: + return rrset + else: + for rrset in section: + if rrset.match(name, rdclass, rdtype, covers, deleting): + return rrset + if not create: + raise KeyError + rrset = dns.rrset.RRset(name, rdclass, rdtype, covers, deleting) + section.append(rrset) + if self.index is not None: + self.index[key] = rrset + return rrset + + def get_rrset(self, section, name, rdclass, rdtype, + covers=dns.rdatatype.NONE, deleting=None, create=False, + force_unique=False): + """Get the RRset with the given attributes in the specified section. + + If the RRset is not found, None is returned. + + *section*, an ``int`` section number, or one of the section + attributes of this message. This specifies the + the section of the message to search. For example:: + + my_message.get_rrset(my_message.answer, name, rdclass, rdtype) + my_message.get_rrset(dns.message.ANSWER, name, rdclass, rdtype) + + *name*, a ``dns.name.Name``, the name of the RRset. 
+ + *rdclass*, an ``int``, the class of the RRset. + + *rdtype*, an ``int``, the type of the RRset. + + *covers*, an ``int`` or ``None``, the covers value of the RRset. + The default is ``None``. + + *deleting*, an ``int`` or ``None``, the deleting value of the RRset. + The default is ``None``. + + *create*, a ``bool``. If ``True``, create the RRset if it is not found. + The created RRset is appended to *section*. + + *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, + create a new RRset regardless of whether a matching RRset exists + already. The default is ``False``. This is useful when creating + DDNS Update messages, as order matters for them. + + Returns a ``dns.rrset.RRset object`` or ``None``. + """ + + try: + rrset = self.find_rrset(section, name, rdclass, rdtype, covers, + deleting, create, force_unique) + except KeyError: + rrset = None + return rrset + + def to_wire(self, origin=None, max_size=0, **kw): + """Return a string containing the message in DNS compressed wire + format. + + Additional keyword arguments are passed to the RRset ``to_wire()`` + method. + + *origin*, a ``dns.name.Name`` or ``None``, the origin to be appended + to any relative names. + + *max_size*, an ``int``, the maximum size of the wire format + output; default is 0, which means "the message's request + payload, if nonzero, or 65535". + + Raises ``dns.exception.TooBig`` if *max_size* was exceeded. + + Returns a ``binary``. + """ + + if max_size == 0: + if self.request_payload != 0: + max_size = self.request_payload + else: + max_size = 65535 + if max_size < 512: + max_size = 512 + elif max_size > 65535: + max_size = 65535 + r = dns.renderer.Renderer(self.id, self.flags, max_size, origin) + for rrset in self.question: + r.add_question(rrset.name, rrset.rdtype, rrset.rdclass) + for rrset in self.answer: + r.add_rrset(dns.renderer.ANSWER, rrset, **kw) + for rrset in self.authority: + r.add_rrset(dns.renderer.AUTHORITY, rrset, **kw) + if self.edns >= 0: + r.add_edns(self.edns, self.ednsflags, self.payload, self.options) + for rrset in self.additional: + r.add_rrset(dns.renderer.ADDITIONAL, rrset, **kw) + r.write_header() + if self.keyname is not None: + r.add_tsig(self.keyname, self.keyring[self.keyname], + self.fudge, self.original_id, self.tsig_error, + self.other_data, self.request_mac, + self.keyalgorithm) + self.mac = r.mac + return r.get_wire() + + def use_tsig(self, keyring, keyname=None, fudge=300, + original_id=None, tsig_error=0, other_data=b'', + algorithm=dns.tsig.default_algorithm): + """When sending, a TSIG signature using the specified keyring + and keyname should be added. + + See the documentation of the Message class for a complete + description of the keyring dictionary. + + *keyring*, a ``dict``, the TSIG keyring to use. If a + *keyring* is specified but a *keyname* is not, then the key + used will be the first key in the *keyring*. Note that the + order of keys in a dictionary is not defined, so applications + should supply a keyname when a keyring is used, unless they + know the keyring contains only one key. + + *keyname*, a ``dns.name.Name`` or ``None``, the name of the TSIG key + to use; defaults to ``None``. The key must be defined in the keyring. + + *fudge*, an ``int``, the TSIG time fudge. + + *original_id*, an ``int``, the TSIG original id. If ``None``, + the message's id is used. + + *tsig_error*, an ``int``, the TSIG error code. + + *other_data*, a ``binary``, the TSIG other data. + + *algorithm*, a ``dns.name.Name``, the TSIG algorithm to use. 
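A keyring here is just a dict mapping key names to secret bytes, so a minimal signing setup looks like this (a sketch with a made-up key name and secret; make_query is defined later in this module):

import dns.message
import dns.name

# Hypothetical key name and secret, for illustration only.
keyring = {dns.name.from_text('keyname.example.'): b'0123456789abcdef'}

msg = dns.message.make_query('www.example.com.', 'A')
msg.use_tsig(keyring)        # keyname defaults to the only key in the keyring
wire = msg.to_wire()         # the TSIG RR is appended when rendering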
+ """ + + self.keyring = keyring + if keyname is None: + self.keyname = list(self.keyring.keys())[0] + else: + if isinstance(keyname, string_types): + keyname = dns.name.from_text(keyname) + self.keyname = keyname + self.keyalgorithm = algorithm + self.fudge = fudge + if original_id is None: + self.original_id = self.id + else: + self.original_id = original_id + self.tsig_error = tsig_error + self.other_data = other_data + + def use_edns(self, edns=0, ednsflags=0, payload=1280, request_payload=None, + options=None): + """Configure EDNS behavior. + + *edns*, an ``int``, is the EDNS level to use. Specifying + ``None``, ``False``, or ``-1`` means "do not use EDNS", and in this case + the other parameters are ignored. Specifying ``True`` is + equivalent to specifying 0, i.e. "use EDNS0". + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the + maximum size of UDP datagram the sender can handle. I.e. how big + a response to this message can be. + + *request_payload*, an ``int``, is the EDNS payload size to use when + sending this message. If not specified, defaults to the value of + *payload*. + + *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS + options. + """ + + if edns is None or edns is False: + edns = -1 + if edns is True: + edns = 0 + if request_payload is None: + request_payload = payload + if edns < 0: + ednsflags = 0 + payload = 0 + request_payload = 0 + options = [] + else: + # make sure the EDNS version in ednsflags agrees with edns + ednsflags &= long(0xFF00FFFF) + ednsflags |= (edns << 16) + if options is None: + options = [] + self.edns = edns + self.ednsflags = ednsflags + self.payload = payload + self.options = options + self.request_payload = request_payload + + def want_dnssec(self, wanted=True): + """Enable or disable 'DNSSEC desired' flag in requests. + + *wanted*, a ``bool``. If ``True``, then DNSSEC data is + desired in the response, EDNS is enabled if required, and then + the DO bit is set. If ``False``, the DO bit is cleared if + EDNS is enabled. + """ + + if wanted: + if self.edns < 0: + self.use_edns() + self.ednsflags |= dns.flags.DO + elif self.edns >= 0: + self.ednsflags &= ~dns.flags.DO + + def rcode(self): + """Return the rcode. + + Returns an ``int``. + """ + return dns.rcode.from_flags(self.flags, self.ednsflags) + + def set_rcode(self, rcode): + """Set the rcode. + + *rcode*, an ``int``, is the rcode to set. + """ + (value, evalue) = dns.rcode.to_flags(rcode) + self.flags &= 0xFFF0 + self.flags |= value + self.ednsflags &= long(0x00FFFFFF) + self.ednsflags |= evalue + if self.ednsflags != 0 and self.edns < 0: + self.edns = 0 + + def opcode(self): + """Return the opcode. + + Returns an ``int``. + """ + return dns.opcode.from_flags(self.flags) + + def set_opcode(self, opcode): + """Set the opcode. + + *opcode*, an ``int``, is the opcode to set. + """ + self.flags &= 0x87FF + self.flags |= dns.opcode.to_flags(opcode) + + +class _WireReader(object): + + """Wire format reader. + + wire: a binary, is the wire-format message. + message: The message object being built + current: When building a message object from wire format, this + variable contains the offset from the beginning of wire of the next octet + to be read. + updating: Is the message a dynamic update? + one_rr_per_rrset: Put each RR into its own RRset? + ignore_trailing: Ignore trailing junk at end of request? + zone_rdclass: The class of the zone in messages which are + DNS dynamic updates. 
+ """ + + def __init__(self, wire, message, question_only=False, + one_rr_per_rrset=False, ignore_trailing=False): + self.wire = dns.wiredata.maybe_wrap(wire) + self.message = message + self.current = 0 + self.updating = False + self.zone_rdclass = dns.rdataclass.IN + self.question_only = question_only + self.one_rr_per_rrset = one_rr_per_rrset + self.ignore_trailing = ignore_trailing + + def _get_question(self, qcount): + """Read the next *qcount* records from the wire data and add them to + the question section. + """ + + if self.updating and qcount > 1: + raise dns.exception.FormError + + for i in xrange(0, qcount): + (qname, used) = dns.name.from_wire(self.wire, self.current) + if self.message.origin is not None: + qname = qname.relativize(self.message.origin) + self.current = self.current + used + (rdtype, rdclass) = \ + struct.unpack('!HH', + self.wire[self.current:self.current + 4]) + self.current = self.current + 4 + self.message.find_rrset(self.message.question, qname, + rdclass, rdtype, create=True, + force_unique=True) + if self.updating: + self.zone_rdclass = rdclass + + def _get_section(self, section, count): + """Read the next I{count} records from the wire data and add them to + the specified section. + + section: the section of the message to which to add records + count: the number of records to read + """ + + if self.updating or self.one_rr_per_rrset: + force_unique = True + else: + force_unique = False + seen_opt = False + for i in xrange(0, count): + rr_start = self.current + (name, used) = dns.name.from_wire(self.wire, self.current) + absolute_name = name + if self.message.origin is not None: + name = name.relativize(self.message.origin) + self.current = self.current + used + (rdtype, rdclass, ttl, rdlen) = \ + struct.unpack('!HHIH', + self.wire[self.current:self.current + 10]) + self.current = self.current + 10 + if rdtype == dns.rdatatype.OPT: + if section is not self.message.additional or seen_opt: + raise BadEDNS + self.message.payload = rdclass + self.message.ednsflags = ttl + self.message.edns = (ttl & 0xff0000) >> 16 + self.message.options = [] + current = self.current + optslen = rdlen + while optslen > 0: + (otype, olen) = \ + struct.unpack('!HH', + self.wire[current:current + 4]) + current = current + 4 + opt = dns.edns.option_from_wire( + otype, self.wire, current, olen) + self.message.options.append(opt) + current = current + olen + optslen = optslen - 4 - olen + seen_opt = True + elif rdtype == dns.rdatatype.TSIG: + if not (section is self.message.additional and + i == (count - 1)): + raise BadTSIG + if self.message.keyring is None: + raise UnknownTSIGKey('got signed message without keyring') + secret = self.message.keyring.get(absolute_name) + if secret is None: + raise UnknownTSIGKey("key '%s' unknown" % name) + self.message.keyname = absolute_name + (self.message.keyalgorithm, self.message.mac) = \ + dns.tsig.get_algorithm_and_mac(self.wire, self.current, + rdlen) + self.message.tsig_ctx = \ + dns.tsig.validate(self.wire, + absolute_name, + secret, + int(time.time()), + self.message.request_mac, + rr_start, + self.current, + rdlen, + self.message.tsig_ctx, + self.message.multi, + self.message.first) + self.message.had_tsig = True + else: + if ttl < 0: + ttl = 0 + if self.updating and \ + (rdclass == dns.rdataclass.ANY or + rdclass == dns.rdataclass.NONE): + deleting = rdclass + rdclass = self.zone_rdclass + else: + deleting = None + if deleting == dns.rdataclass.ANY or \ + (deleting == dns.rdataclass.NONE and + section is self.message.answer): + covers 
= dns.rdatatype.NONE + rd = None + else: + rd = dns.rdata.from_wire(rdclass, rdtype, self.wire, + self.current, rdlen, + self.message.origin) + covers = rd.covers() + if self.message.xfr and rdtype == dns.rdatatype.SOA: + force_unique = True + rrset = self.message.find_rrset(section, name, + rdclass, rdtype, covers, + deleting, True, force_unique) + if rd is not None: + rrset.add(rd, ttl) + self.current = self.current + rdlen + + def read(self): + """Read a wire format DNS message and build a dns.message.Message + object.""" + + l = len(self.wire) + if l < 12: + raise ShortHeader + (self.message.id, self.message.flags, qcount, ancount, + aucount, adcount) = struct.unpack('!HHHHHH', self.wire[:12]) + self.current = 12 + if dns.opcode.is_update(self.message.flags): + self.updating = True + self._get_question(qcount) + if self.question_only: + return + self._get_section(self.message.answer, ancount) + self._get_section(self.message.authority, aucount) + self._get_section(self.message.additional, adcount) + if not self.ignore_trailing and self.current != l: + raise TrailingJunk + if self.message.multi and self.message.tsig_ctx and \ + not self.message.had_tsig: + self.message.tsig_ctx.update(self.wire) + + +def from_wire(wire, keyring=None, request_mac=b'', xfr=False, origin=None, + tsig_ctx=None, multi=False, first=True, + question_only=False, one_rr_per_rrset=False, + ignore_trailing=False): + """Convert a DNS wire format message into a message + object. + + *keyring*, a ``dict``, the keyring to use if the message is signed. + + *request_mac*, a ``binary``. If the message is a response to a + TSIG-signed request, *request_mac* should be set to the MAC of + that request. + + *xfr*, a ``bool``, should be set to ``True`` if this message is part of + a zone transfer. + + *origin*, a ``dns.name.Name`` or ``None``. If the message is part + of a zone transfer, *origin* should be the origin name of the + zone. + + *tsig_ctx*, a ``hmac.HMAC`` objext, the ongoing TSIG context, used + when validating zone transfers. + + *multi*, a ``bool``, should be set to ``True`` if this message + part of a multiple message sequence. + + *first*, a ``bool``, should be set to ``True`` if this message is + stand-alone, or the first message in a multi-message sequence. + + *question_only*, a ``bool``. If ``True``, read only up to + the end of the question section. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its + own RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the message. + + Raises ``dns.message.ShortHeader`` if the message is less than 12 octets + long. + + Raises ``dns.messaage.TrailingJunk`` if there were octets in the message + past the end of the proper DNS message, and *ignore_trailing* is ``False``. + + Raises ``dns.message.BadEDNS`` if an OPT record was in the + wrong section, or occurred more than once. + + Raises ``dns.message.BadTSIG`` if a TSIG record was not the last + record of the additional data section. + + Returns a ``dns.message.Message``. + """ + + m = Message(id=0) + m.keyring = keyring + m.request_mac = request_mac + m.xfr = xfr + m.origin = origin + m.tsig_ctx = tsig_ctx + m.multi = multi + m.first = first + + reader = _WireReader(wire, m, question_only, one_rr_per_rrset, + ignore_trailing) + reader.read() + + return m + + +class _TextReader(object): + + """Text format reader. + + tok: the tokenizer. + message: The message object being built. + updating: Is the message a dynamic update? 
+ zone_rdclass: The class of the zone in messages which are + DNS dynamic updates. + last_name: The most recently read name when building a message object. + """ + + def __init__(self, text, message): + self.message = message + self.tok = dns.tokenizer.Tokenizer(text) + self.last_name = None + self.zone_rdclass = dns.rdataclass.IN + self.updating = False + + def _header_line(self, section): + """Process one line from the text format header section.""" + + token = self.tok.get() + what = token.value + if what == 'id': + self.message.id = self.tok.get_int() + elif what == 'flags': + while True: + token = self.tok.get() + if not token.is_identifier(): + self.tok.unget(token) + break + self.message.flags = self.message.flags | \ + dns.flags.from_text(token.value) + if dns.opcode.is_update(self.message.flags): + self.updating = True + elif what == 'edns': + self.message.edns = self.tok.get_int() + self.message.ednsflags = self.message.ednsflags | \ + (self.message.edns << 16) + elif what == 'eflags': + if self.message.edns < 0: + self.message.edns = 0 + while True: + token = self.tok.get() + if not token.is_identifier(): + self.tok.unget(token) + break + self.message.ednsflags = self.message.ednsflags | \ + dns.flags.edns_from_text(token.value) + elif what == 'payload': + self.message.payload = self.tok.get_int() + if self.message.edns < 0: + self.message.edns = 0 + elif what == 'opcode': + text = self.tok.get_string() + self.message.flags = self.message.flags | \ + dns.opcode.to_flags(dns.opcode.from_text(text)) + elif what == 'rcode': + text = self.tok.get_string() + self.message.set_rcode(dns.rcode.from_text(text)) + else: + raise UnknownHeaderField + self.tok.get_eol() + + def _question_line(self, section): + """Process one line from the text format question section.""" + + token = self.tok.get(want_leading=True) + if not token.is_whitespace(): + self.last_name = dns.name.from_text(token.value, None) + name = self.last_name + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + # Class + try: + rdclass = dns.rdataclass.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + rdclass = dns.rdataclass.IN + # Type + rdtype = dns.rdatatype.from_text(token.value) + self.message.find_rrset(self.message.question, name, + rdclass, rdtype, create=True, + force_unique=True) + if self.updating: + self.zone_rdclass = rdclass + self.tok.get_eol() + + def _rr_line(self, section): + """Process one line from the text format answer, authority, or + additional data sections. 
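These line handlers consume exactly what to_text() emits, so text round trips are symmetric with the wire case (sketch; from_text is defined below):

import dns.message

msg = dns.message.Message(id=7)
parsed = dns.message.from_text(msg.to_text())
assert parsed == msg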
+ """ + + deleting = None + # Name + token = self.tok.get(want_leading=True) + if not token.is_whitespace(): + self.last_name = dns.name.from_text(token.value, None) + name = self.last_name + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + # TTL + try: + ttl = int(token.value, 0) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + ttl = 0 + # Class + try: + rdclass = dns.rdataclass.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + if rdclass == dns.rdataclass.ANY or rdclass == dns.rdataclass.NONE: + deleting = rdclass + rdclass = self.zone_rdclass + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + rdclass = dns.rdataclass.IN + # Type + rdtype = dns.rdatatype.from_text(token.value) + token = self.tok.get() + if not token.is_eol_or_eof(): + self.tok.unget(token) + rd = dns.rdata.from_text(rdclass, rdtype, self.tok, None) + covers = rd.covers() + else: + rd = None + covers = dns.rdatatype.NONE + rrset = self.message.find_rrset(section, name, + rdclass, rdtype, covers, + deleting, True, self.updating) + if rd is not None: + rrset.add(rd, ttl) + + def read(self): + """Read a text format DNS message and build a dns.message.Message + object.""" + + line_method = self._header_line + section = None + while 1: + token = self.tok.get(True, True) + if token.is_eol_or_eof(): + break + if token.is_comment(): + u = token.value.upper() + if u == 'HEADER': + line_method = self._header_line + elif u == 'QUESTION' or u == 'ZONE': + line_method = self._question_line + section = self.message.question + elif u == 'ANSWER' or u == 'PREREQ': + line_method = self._rr_line + section = self.message.answer + elif u == 'AUTHORITY' or u == 'UPDATE': + line_method = self._rr_line + section = self.message.authority + elif u == 'ADDITIONAL': + line_method = self._rr_line + section = self.message.additional + self.tok.get_eol() + continue + self.tok.unget(token) + line_method(section) + + +def from_text(text): + """Convert the text format message into a message object. + + *text*, a ``text``, the text format message. + + Raises ``dns.message.UnknownHeaderField`` if a header is unknown. + + Raises ``dns.exception.SyntaxError`` if the text is badly formed. + + Returns a ``dns.message.Message object`` + """ + + # 'text' can also be a file, but we don't publish that fact + # since it's an implementation detail. The official file + # interface is from_file(). + + m = Message() + + reader = _TextReader(text, m) + reader.read() + + return m + + +def from_file(f): + """Read the next text format message from the specified file. + + *f*, a ``file`` or ``text``. If *f* is text, it is treated as the + pathname of a file to open. + + Raises ``dns.message.UnknownHeaderField`` if a header is unknown. + + Raises ``dns.exception.SyntaxError`` if the text is badly formed. + + Returns a ``dns.message.Message object`` + """ + + str_type = string_types + opts = 'rU' + + if isinstance(f, str_type): + f = open(f, opts) + want_close = True + else: + want_close = False + + try: + m = from_text(f) + finally: + if want_close: + f.close() + return m + + +def make_query(qname, rdtype, rdclass=dns.rdataclass.IN, use_edns=None, + want_dnssec=False, ednsflags=None, payload=None, + request_payload=None, options=None): + """Make a query message. 
+ + The query name, type, and class may all be specified either + as objects of the appropriate type, or as strings. + + The query will have a randomly chosen query id, and its DNS flags + will be set to dns.flags.RD. + + qname, a ``dns.name.Name`` or ``text``, the query name. + + *rdtype*, an ``int`` or ``text``, the desired rdata type. + + *rdclass*, an ``int`` or ``text``, the desired rdata class; the default + is class IN. + + *use_edns*, an ``int``, ``bool`` or ``None``. The EDNS level to use; the + default is None (no EDNS). + See the description of dns.message.Message.use_edns() for the possible + values for use_edns and their meanings. + + *want_dnssec*, a ``bool``. If ``True``, DNSSEC data is desired. + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the + maximum size of UDP datagram the sender can handle. I.e. how big + a response to this message can be. + + *request_payload*, an ``int``, is the EDNS payload size to use when + sending this message. If not specified, defaults to the value of + *payload*. + + *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS + options. + + Returns a ``dns.message.Message`` + """ + + if isinstance(qname, string_types): + qname = dns.name.from_text(qname) + if isinstance(rdtype, string_types): + rdtype = dns.rdatatype.from_text(rdtype) + if isinstance(rdclass, string_types): + rdclass = dns.rdataclass.from_text(rdclass) + m = Message() + m.flags |= dns.flags.RD + m.find_rrset(m.question, qname, rdclass, rdtype, create=True, + force_unique=True) + # only pass keywords on to use_edns if they have been set to a + # non-None value. Setting a field will turn EDNS on if it hasn't + # been configured. + kwargs = {} + if ednsflags is not None: + kwargs['ednsflags'] = ednsflags + if use_edns is None: + use_edns = 0 + if payload is not None: + kwargs['payload'] = payload + if use_edns is None: + use_edns = 0 + if request_payload is not None: + kwargs['request_payload'] = request_payload + if use_edns is None: + use_edns = 0 + if options is not None: + kwargs['options'] = options + if use_edns is None: + use_edns = 0 + kwargs['edns'] = use_edns + m.use_edns(**kwargs) + m.want_dnssec(want_dnssec) + return m + + +def make_response(query, recursion_available=False, our_payload=8192, + fudge=300): + """Make a message which is a response for the specified query. + The message returned is really a response skeleton; it has all + of the infrastructure required of a response, but none of the + content. + + The response's question section is a shallow copy of the query's + question section, so the query's question RRsets should not be + changed. + + *query*, a ``dns.message.Message``, the query to respond to. + + *recursion_available*, a ``bool``, should RA be set in the response? + + *our_payload*, an ``int``, the payload size to advertise in EDNS + responses. + + *fudge*, an ``int``, the TSIG time fudge. + + Returns a ``dns.message.Message`` object. 
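Putting the two factories together, the usual request/response skeleton looks like this (sketch):

import dns.flags
import dns.message

query = dns.message.make_query('www.example.com.', 'A')
assert query.flags & dns.flags.RD          # recursion desired is set

response = dns.message.make_response(query)
assert response.id == query.id
assert response.flags & dns.flags.QR       # marked as a response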
+ """ + + if query.flags & dns.flags.QR: + raise dns.exception.FormError('specified query message is not a query') + response = dns.message.Message(query.id) + response.flags = dns.flags.QR | (query.flags & dns.flags.RD) + if recursion_available: + response.flags |= dns.flags.RA + response.set_opcode(query.opcode()) + response.question = list(query.question) + if query.edns >= 0: + response.use_edns(0, 0, our_payload, query.payload) + if query.had_tsig: + response.use_tsig(query.keyring, query.keyname, fudge, None, 0, b'', + query.keyalgorithm) + response.request_mac = query.mac + return response diff --git a/openpype/vendor/python/python_2/dns/name.py b/openpype/vendor/python/python_2/dns/name.py new file mode 100644 index 0000000000..0bcfd83432 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/name.py @@ -0,0 +1,994 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Names. +""" + +from io import BytesIO +import struct +import sys +import copy +import encodings.idna +try: + import idna + have_idna_2008 = True +except ImportError: + have_idna_2008 = False + +import dns.exception +import dns.wiredata + +from ._compat import long, binary_type, text_type, unichr, maybe_decode + +try: + maxint = sys.maxint # pylint: disable=sys-max-int +except AttributeError: + maxint = (1 << (8 * struct.calcsize("P"))) // 2 - 1 + + +# fullcompare() result values + +#: The compared names have no relationship to each other. +NAMERELN_NONE = 0 +#: the first name is a superdomain of the second. +NAMERELN_SUPERDOMAIN = 1 +#: The first name is a subdomain of the second. +NAMERELN_SUBDOMAIN = 2 +#: The compared names are equal. +NAMERELN_EQUAL = 3 +#: The compared names have a common ancestor. 
+NAMERELN_COMMONANCESTOR = 4 + + +class EmptyLabel(dns.exception.SyntaxError): + """A DNS label is empty.""" + + +class BadEscape(dns.exception.SyntaxError): + """An escaped code in a text format of DNS name is invalid.""" + + +class BadPointer(dns.exception.FormError): + """A DNS compression pointer points forward instead of backward.""" + + +class BadLabelType(dns.exception.FormError): + """The label type in DNS name wire format is unknown.""" + + +class NeedAbsoluteNameOrOrigin(dns.exception.DNSException): + """An attempt was made to convert a non-absolute name to + wire when there was also a non-absolute (or missing) origin.""" + + +class NameTooLong(dns.exception.FormError): + """A DNS name is > 255 octets long.""" + + +class LabelTooLong(dns.exception.SyntaxError): + """A DNS label is > 63 octets long.""" + + +class AbsoluteConcatenation(dns.exception.DNSException): + """An attempt was made to append anything other than the + empty name to an absolute DNS name.""" + + +class NoParent(dns.exception.DNSException): + """An attempt was made to get the parent of the root name + or the empty name.""" + +class NoIDNA2008(dns.exception.DNSException): + """IDNA 2008 processing was requested but the idna module is not + available.""" + + +class IDNAException(dns.exception.DNSException): + """IDNA processing raised an exception.""" + + supp_kwargs = {'idna_exception'} + fmt = "IDNA processing exception: {idna_exception}" + + +class IDNACodec(object): + """Abstract base class for IDNA encoder/decoders.""" + + def __init__(self): + pass + + def encode(self, label): + raise NotImplementedError + + def decode(self, label): + # We do not apply any IDNA policy on decode; we just + downcased = label.lower() + if downcased.startswith(b'xn--'): + try: + label = downcased[4:].decode('punycode') + except Exception as e: + raise IDNAException(idna_exception=e) + else: + label = maybe_decode(label) + return _escapify(label, True) + + +class IDNA2003Codec(IDNACodec): + """IDNA 2003 encoder/decoder.""" + + def __init__(self, strict_decode=False): + """Initialize the IDNA 2003 encoder/decoder. + + *strict_decode* is a ``bool``. If `True`, then IDNA2003 checking + is done when decoding. This can cause failures if the name + was encoded with IDNA2008. The default is `False`. + """ + + super(IDNA2003Codec, self).__init__() + self.strict_decode = strict_decode + + def encode(self, label): + """Encode *label*.""" + + if label == '': + return b'' + try: + return encodings.idna.ToASCII(label) + except UnicodeError: + raise LabelTooLong + + def decode(self, label): + """Decode *label*.""" + if not self.strict_decode: + return super(IDNA2003Codec, self).decode(label) + if label == b'': + return u'' + try: + return _escapify(encodings.idna.ToUnicode(label), True) + except Exception as e: + raise IDNAException(idna_exception=e) + + +class IDNA2008Codec(IDNACodec): + """IDNA 2008 encoder/decoder. + + *uts_46* is a ``bool``. If True, apply Unicode IDNA + compatibility processing as described in Unicode Technical + Standard #46 (http://unicode.org/reports/tr46/). + If False, do not apply the mapping. The default is False. + + *transitional* is a ``bool``: If True, use the + "transitional" mode described in Unicode Technical Standard + #46. The default is False. + + *allow_pure_ascii* is a ``bool``. If True, then a label which + consists of only ASCII characters is allowed. This is less + strict than regular IDNA 2008, but is also necessary for mixed + names, e.g. a name with starting with "_sip._tcp." 
and ending + in an IDN suffix which would otherwise be disallowed. The + default is False. + + *strict_decode* is a ``bool``: If True, then IDNA2008 checking + is done when decoding. This can cause failures if the name + was encoded with IDNA2003. The default is False. + """ + + def __init__(self, uts_46=False, transitional=False, + allow_pure_ascii=False, strict_decode=False): + """Initialize the IDNA 2008 encoder/decoder.""" + super(IDNA2008Codec, self).__init__() + self.uts_46 = uts_46 + self.transitional = transitional + self.allow_pure_ascii = allow_pure_ascii + self.strict_decode = strict_decode + + def is_all_ascii(self, label): + for c in label: + if ord(c) > 0x7f: + return False + return True + + def encode(self, label): + if label == '': + return b'' + if self.allow_pure_ascii and self.is_all_ascii(label): + return label.encode('ascii') + if not have_idna_2008: + raise NoIDNA2008 + try: + if self.uts_46: + label = idna.uts46_remap(label, False, self.transitional) + return idna.alabel(label) + except idna.IDNAError as e: + raise IDNAException(idna_exception=e) + + def decode(self, label): + if not self.strict_decode: + return super(IDNA2008Codec, self).decode(label) + if label == b'': + return u'' + if not have_idna_2008: + raise NoIDNA2008 + try: + if self.uts_46: + label = idna.uts46_remap(label, False, False) + return _escapify(idna.ulabel(label), True) + except idna.IDNAError as e: + raise IDNAException(idna_exception=e) + +_escaped = bytearray(b'"().;\\@$') + +IDNA_2003_Practical = IDNA2003Codec(False) +IDNA_2003_Strict = IDNA2003Codec(True) +IDNA_2003 = IDNA_2003_Practical +IDNA_2008_Practical = IDNA2008Codec(True, False, True, False) +IDNA_2008_UTS_46 = IDNA2008Codec(True, False, False, False) +IDNA_2008_Strict = IDNA2008Codec(False, False, False, True) +IDNA_2008_Transitional = IDNA2008Codec(True, True, False, False) +IDNA_2008 = IDNA_2008_Practical + +def _escapify(label, unicode_mode=False): + """Escape the characters in label which need it. + @param unicode_mode: escapify only special and whitespace (<= 0x20) + characters + @returns: the escaped string + @rtype: string""" + if not unicode_mode: + text = '' + if isinstance(label, text_type): + label = label.encode() + for c in bytearray(label): + if c in _escaped: + text += '\\' + chr(c) + elif c > 0x20 and c < 0x7F: + text += chr(c) + else: + text += '\\%03d' % c + return text.encode() + + text = u'' + if isinstance(label, binary_type): + label = label.decode() + for c in label: + if c > u'\x20' and c < u'\x7f': + text += c + else: + if c >= u'\x7f': + text += c + else: + text += u'\\%03d' % ord(c) + return text + +def _validate_labels(labels): + """Check for empty labels in the middle of a label sequence, + labels that are too long, and for too many labels. + + Raises ``dns.name.NameTooLong`` if the name as a whole is too long. + + Raises ``dns.name.EmptyLabel`` if a label is empty (i.e. the root + label) and appears in a position other than the end of the label + sequence + + """ + + l = len(labels) + total = 0 + i = -1 + j = 0 + for label in labels: + ll = len(label) + total += ll + 1 + if ll > 63: + raise LabelTooLong + if i < 0 and label == b'': + i = j + j += 1 + if total > 255: + raise NameTooLong + if i >= 0 and i != l - 1: + raise EmptyLabel + + +def _maybe_convert_to_binary(label): + """If label is ``text``, convert it to ``binary``. If it is already + ``binary`` just return it. 
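The precomposed codec instances above cover the common policies; the classic example behaves like this (a sketch; only the 2008 codecs need the external idna package):

import dns.name

ace = dns.name.IDNA_2003.encode(u'b\xfccher')    # u'bücher' -> b'xn--bcher-kva'
text = dns.name.IDNA_2003.decode(ace)            # back to u'b\xfccher'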
+ + """ + + if isinstance(label, binary_type): + return label + if isinstance(label, text_type): + return label.encode() + raise ValueError + + +class Name(object): + + """A DNS name. + + The dns.name.Name class represents a DNS name as a tuple of + labels. Each label is a `binary` in DNS wire format. Instances + of the class are immutable. + """ + + __slots__ = ['labels'] + + def __init__(self, labels): + """*labels* is any iterable whose values are ``text`` or ``binary``. + """ + + labels = [_maybe_convert_to_binary(x) for x in labels] + super(Name, self).__setattr__('labels', tuple(labels)) + _validate_labels(self.labels) + + def __setattr__(self, name, value): + # Names are immutable + raise TypeError("object doesn't support attribute assignment") + + def __copy__(self): + return Name(self.labels) + + def __deepcopy__(self, memo): + return Name(copy.deepcopy(self.labels, memo)) + + def __getstate__(self): + # Names can be pickled + return {'labels': self.labels} + + def __setstate__(self, state): + super(Name, self).__setattr__('labels', state['labels']) + _validate_labels(self.labels) + + def is_absolute(self): + """Is the most significant label of this name the root label? + + Returns a ``bool``. + """ + + return len(self.labels) > 0 and self.labels[-1] == b'' + + def is_wild(self): + """Is this name wild? (I.e. Is the least significant label '*'?) + + Returns a ``bool``. + """ + + return len(self.labels) > 0 and self.labels[0] == b'*' + + def __hash__(self): + """Return a case-insensitive hash of the name. + + Returns an ``int``. + """ + + h = long(0) + for label in self.labels: + for c in bytearray(label.lower()): + h += (h << 3) + c + return int(h % maxint) + + def fullcompare(self, other): + """Compare two names, returning a 3-tuple + ``(relation, order, nlabels)``. + + *relation* describes the relation ship between the names, + and is one of: ``dns.name.NAMERELN_NONE``, + ``dns.name.NAMERELN_SUPERDOMAIN``, ``dns.name.NAMERELN_SUBDOMAIN``, + ``dns.name.NAMERELN_EQUAL``, or ``dns.name.NAMERELN_COMMONANCESTOR``. + + *order* is < 0 if *self* < *other*, > 0 if *self* > *other*, and == + 0 if *self* == *other*. A relative name is always less than an + absolute name. If both names have the same relativity, then + the DNSSEC order relation is used to order them. + + *nlabels* is the number of significant labels that the two names + have in common. + + Here are some examples. Names ending in "." are absolute names, + those not ending in "." are relative names. + + ============= ============= =========== ===== ======= + self other relation order nlabels + ============= ============= =========== ===== ======= + www.example. www.example. equal 0 3 + www.example. example. subdomain > 0 2 + example. www.example. superdomain < 0 2 + example1.com. example2.com. common anc. < 0 2 + example1 example2. none < 0 0 + example1. 
example2 none > 0 0 + ============= ============= =========== ===== ======= + """ + + sabs = self.is_absolute() + oabs = other.is_absolute() + if sabs != oabs: + if sabs: + return (NAMERELN_NONE, 1, 0) + else: + return (NAMERELN_NONE, -1, 0) + l1 = len(self.labels) + l2 = len(other.labels) + ldiff = l1 - l2 + if ldiff < 0: + l = l1 + else: + l = l2 + + order = 0 + nlabels = 0 + namereln = NAMERELN_NONE + while l > 0: + l -= 1 + l1 -= 1 + l2 -= 1 + label1 = self.labels[l1].lower() + label2 = other.labels[l2].lower() + if label1 < label2: + order = -1 + if nlabels > 0: + namereln = NAMERELN_COMMONANCESTOR + return (namereln, order, nlabels) + elif label1 > label2: + order = 1 + if nlabels > 0: + namereln = NAMERELN_COMMONANCESTOR + return (namereln, order, nlabels) + nlabels += 1 + order = ldiff + if ldiff < 0: + namereln = NAMERELN_SUPERDOMAIN + elif ldiff > 0: + namereln = NAMERELN_SUBDOMAIN + else: + namereln = NAMERELN_EQUAL + return (namereln, order, nlabels) + + def is_subdomain(self, other): + """Is self a subdomain of other? + + Note that the notion of subdomain includes equality, e.g. + "dnpython.org" is a subdomain of itself. + + Returns a ``bool``. + """ + + (nr, o, nl) = self.fullcompare(other) + if nr == NAMERELN_SUBDOMAIN or nr == NAMERELN_EQUAL: + return True + return False + + def is_superdomain(self, other): + """Is self a superdomain of other? + + Note that the notion of superdomain includes equality, e.g. + "dnpython.org" is a superdomain of itself. + + Returns a ``bool``. + """ + + (nr, o, nl) = self.fullcompare(other) + if nr == NAMERELN_SUPERDOMAIN or nr == NAMERELN_EQUAL: + return True + return False + + def canonicalize(self): + """Return a name which is equal to the current name, but is in + DNSSEC canonical form. + """ + + return Name([x.lower() for x in self.labels]) + + def __eq__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] == 0 + else: + return False + + def __ne__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] != 0 + else: + return True + + def __lt__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] < 0 + else: + return NotImplemented + + def __le__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] <= 0 + else: + return NotImplemented + + def __ge__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] >= 0 + else: + return NotImplemented + + def __gt__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] > 0 + else: + return NotImplemented + + def __repr__(self): + return '' + + def __str__(self): + return self.to_text(False) + + def to_text(self, omit_final_dot=False): + """Convert name to DNS text format. + + *omit_final_dot* is a ``bool``. If True, don't emit the final + dot (denoting the root label) for absolute names. The default + is False. + + Returns a ``text``. + """ + + if len(self.labels) == 0: + return maybe_decode(b'@') + if len(self.labels) == 1 and self.labels[0] == b'': + return maybe_decode(b'.') + if omit_final_dot and self.is_absolute(): + l = self.labels[:-1] + else: + l = self.labels + s = b'.'.join(map(_escapify, l)) + return maybe_decode(s) + + def to_unicode(self, omit_final_dot=False, idna_codec=None): + """Convert name to Unicode text format. + + IDN ACE labels are converted to Unicode. + + *omit_final_dot* is a ``bool``. If True, don't emit the final + dot (denoting the root label) for absolute names. The default + is False. 
+ *idna_codec* specifies the IDNA encoder/decoder. If None, the + dns.name.IDNA_2003_Practical encoder/decoder is used. + The IDNA_2003_Practical decoder does + not impose any policy, it just decodes punycode, so if you + don't want checking for compliance, you can use this decoder + for IDNA2008 as well. + + Returns a ``text``. + """ + + if len(self.labels) == 0: + return u'@' + if len(self.labels) == 1 and self.labels[0] == b'': + return u'.' + if omit_final_dot and self.is_absolute(): + l = self.labels[:-1] + else: + l = self.labels + if idna_codec is None: + idna_codec = IDNA_2003_Practical + return u'.'.join([idna_codec.decode(x) for x in l]) + + def to_digestable(self, origin=None): + """Convert name to a format suitable for digesting in hashes. + + The name is canonicalized and converted to uncompressed wire + format. All names in wire format are absolute. If the name + is a relative name, then an origin must be supplied. + + *origin* is a ``dns.name.Name`` or ``None``. If the name is + relative and origin is not ``None``, then origin will be appended + to the name. + + Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is + relative and no origin was provided. + + Returns a ``binary``. + """ + + if not self.is_absolute(): + if origin is None or not origin.is_absolute(): + raise NeedAbsoluteNameOrOrigin + labels = list(self.labels) + labels.extend(list(origin.labels)) + else: + labels = self.labels + dlabels = [struct.pack('!B%ds' % len(x), len(x), x.lower()) + for x in labels] + return b''.join(dlabels) + + def to_wire(self, file=None, compress=None, origin=None): + """Convert name to wire format, possibly compressing it. + + *file* is the file where the name is emitted (typically a + BytesIO file). If ``None`` (the default), a ``binary`` + containing the wire name will be returned. + + *compress*, a ``dict``, is the compression table to use. If + ``None`` (the default), names will not be compressed. + + *origin* is a ``dns.name.Name`` or ``None``. If the name is + relative and origin is not ``None``, then *origin* will be appended + to it. + + Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is + relative and no origin was provided. + + Returns a ``binary`` or ``None``. + """ + + if file is None: + file = BytesIO() + want_return = True + else: + want_return = False + + if not self.is_absolute(): + if origin is None or not origin.is_absolute(): + raise NeedAbsoluteNameOrOrigin + labels = list(self.labels) + labels.extend(list(origin.labels)) + else: + labels = self.labels + i = 0 + for label in labels: + n = Name(labels[i:]) + i += 1 + if compress is not None: + pos = compress.get(n) + else: + pos = None + if pos is not None: + value = 0xc000 + pos + s = struct.pack('!H', value) + file.write(s) + break + else: + if compress is not None and len(n) > 1: + pos = file.tell() + if pos <= 0x3fff: + compress[n] = pos + l = len(label) + file.write(struct.pack('!B', l)) + if l > 0: + file.write(label) + if want_return: + return file.getvalue() + + def __len__(self): + """The length of the name (in labels). + + Returns an ``int``. + """ + + return len(self.labels) + + def __getitem__(self, index): + return self.labels[index] + + def __add__(self, other): + return self.concatenate(other) + + def __sub__(self, other): + return self.relativize(other) + + def split(self, depth): + """Split a name into a prefix and suffix names at the specified depth. 
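Putting the accessors above together: labels are raw wire-format bytes, and an absolute name carries a trailing root label (sketch):

import dns.name

n = dns.name.Name([b'www', b'example', b''])     # trailing b'' is the root label
assert n.is_absolute()
assert n.to_text() == 'www.example.'
assert n.to_wire() == b'\x03www\x07example\x00'
assert len(n) == 3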
+ + *depth* is an ``int`` specifying the number of labels in the suffix + + Raises ``ValueError`` if *depth* was not >= 0 and <= the length of the + name. + + Returns the tuple ``(prefix, suffix)``. + """ + + l = len(self.labels) + if depth == 0: + return (self, dns.name.empty) + elif depth == l: + return (dns.name.empty, self) + elif depth < 0 or depth > l: + raise ValueError( + 'depth must be >= 0 and <= the length of the name') + return (Name(self[: -depth]), Name(self[-depth:])) + + def concatenate(self, other): + """Return a new name which is the concatenation of self and other. + + Raises ``dns.name.AbsoluteConcatenation`` if the name is + absolute and *other* is not the empty name. + + Returns a ``dns.name.Name``. + """ + + if self.is_absolute() and len(other) > 0: + raise AbsoluteConcatenation + labels = list(self.labels) + labels.extend(list(other.labels)) + return Name(labels) + + def relativize(self, origin): + """If the name is a subdomain of *origin*, return a new name which is + the name relative to origin. Otherwise return the name. + + For example, relativizing ``www.dnspython.org.`` to origin + ``dnspython.org.`` returns the name ``www``. Relativizing ``example.`` + to origin ``dnspython.org.`` returns ``example.``. + + Returns a ``dns.name.Name``. + """ + + if origin is not None and self.is_subdomain(origin): + return Name(self[: -len(origin)]) + else: + return self + + def derelativize(self, origin): + """If the name is a relative name, return a new name which is the + concatenation of the name and origin. Otherwise return the name. + + For example, derelativizing ``www`` to origin ``dnspython.org.`` + returns the name ``www.dnspython.org.``. Derelativizing ``example.`` + to origin ``dnspython.org.`` returns ``example.``. + + Returns a ``dns.name.Name``. + """ + + if not self.is_absolute(): + return self.concatenate(origin) + else: + return self + + def choose_relativity(self, origin=None, relativize=True): + """Return a name with the relativity desired by the caller. + + If *origin* is ``None``, then the name is returned. + Otherwise, if *relativize* is ``True`` the name is + relativized, and if *relativize* is ``False`` the name is + derelativized. + + Returns a ``dns.name.Name``. + """ + + if origin: + if relativize: + return self.relativize(origin) + else: + return self.derelativize(origin) + else: + return self + + def parent(self): + """Return the parent of the name. + + For example, the parent of ``www.dnspython.org.`` is ``dnspython.org``. + + Raises ``dns.name.NoParent`` if the name is either the root name or the + empty name, and thus has no parent. + + Returns a ``dns.name.Name``. + """ + + if self == root or self == empty: + raise NoParent + return Name(self.labels[1:]) + +#: The root name, '.' +root = Name([b'']) + +#: The empty name. +empty = Name([]) + +def from_unicode(text, origin=root, idna_codec=None): + """Convert unicode text into a Name object. + + Labels are encoded in IDN ACE form according to rules specified by + the IDNA codec. + + *text*, a ``text``, is the text to convert into a name. + + *origin*, a ``dns.name.Name``, specifies the origin to + append to non-absolute names. The default is the root name. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Returns a ``dns.name.Name``. 
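+
+    For example (an illustrative sketch; with the default root origin the
+    result is absolute)::
+
+        n = dns.name.from_unicode(u'www.dnspython.org')
+        # n is the absolute name www.dnspython.org.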
+ """ + + if not isinstance(text, text_type): + raise ValueError("input to from_unicode() must be a unicode string") + if not (origin is None or isinstance(origin, Name)): + raise ValueError("origin must be a Name or None") + labels = [] + label = u'' + escaping = False + edigits = 0 + total = 0 + if idna_codec is None: + idna_codec = IDNA_2003 + if text == u'@': + text = u'' + if text: + if text == u'.': + return Name([b'']) # no Unicode "u" on this constant! + for c in text: + if escaping: + if edigits == 0: + if c.isdigit(): + total = int(c) + edigits += 1 + else: + label += c + escaping = False + else: + if not c.isdigit(): + raise BadEscape + total *= 10 + total += int(c) + edigits += 1 + if edigits == 3: + escaping = False + label += unichr(total) + elif c in [u'.', u'\u3002', u'\uff0e', u'\uff61']: + if len(label) == 0: + raise EmptyLabel + labels.append(idna_codec.encode(label)) + label = u'' + elif c == u'\\': + escaping = True + edigits = 0 + total = 0 + else: + label += c + if escaping: + raise BadEscape + if len(label) > 0: + labels.append(idna_codec.encode(label)) + else: + labels.append(b'') + + if (len(labels) == 0 or labels[-1] != b'') and origin is not None: + labels.extend(list(origin.labels)) + return Name(labels) + + +def from_text(text, origin=root, idna_codec=None): + """Convert text into a Name object. + + *text*, a ``text``, is the text to convert into a name. + + *origin*, a ``dns.name.Name``, specifies the origin to + append to non-absolute names. The default is the root name. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Returns a ``dns.name.Name``. + """ + + if isinstance(text, text_type): + return from_unicode(text, origin, idna_codec) + if not isinstance(text, binary_type): + raise ValueError("input to from_text() must be a string") + if not (origin is None or isinstance(origin, Name)): + raise ValueError("origin must be a Name or None") + labels = [] + label = b'' + escaping = False + edigits = 0 + total = 0 + if text == b'@': + text = b'' + if text: + if text == b'.': + return Name([b'']) + for c in bytearray(text): + byte_ = struct.pack('!B', c) + if escaping: + if edigits == 0: + if byte_.isdigit(): + total = int(byte_) + edigits += 1 + else: + label += byte_ + escaping = False + else: + if not byte_.isdigit(): + raise BadEscape + total *= 10 + total += int(byte_) + edigits += 1 + if edigits == 3: + escaping = False + label += struct.pack('!B', total) + elif byte_ == b'.': + if len(label) == 0: + raise EmptyLabel + labels.append(label) + label = b'' + elif byte_ == b'\\': + escaping = True + edigits = 0 + total = 0 + else: + label += byte_ + if escaping: + raise BadEscape + if len(label) > 0: + labels.append(label) + else: + labels.append(b'') + if (len(labels) == 0 or labels[-1] != b'') and origin is not None: + labels.extend(list(origin.labels)) + return Name(labels) + + +def from_wire(message, current): + """Convert possibly compressed wire format into a Name. + + *message* is a ``binary`` containing an entire DNS message in DNS + wire form. + + *current*, an ``int``, is the offset of the beginning of the name + from the start of the message + + Raises ``dns.name.BadPointer`` if a compression pointer did not + point backwards in the message. + + Raises ``dns.name.BadLabelType`` if an invalid label type was encountered. 
+ + Returns a ``(dns.name.Name, int)`` tuple consisting of the name + that was read and the number of bytes of the wire format message + which were consumed reading it. + """ + + if not isinstance(message, binary_type): + raise ValueError("input to from_wire() must be a byte string") + message = dns.wiredata.maybe_wrap(message) + labels = [] + biggest_pointer = current + hops = 0 + count = message[current] + current += 1 + cused = 1 + while count != 0: + if count < 64: + labels.append(message[current: current + count].unwrap()) + current += count + if hops == 0: + cused += count + elif count >= 192: + current = (count & 0x3f) * 256 + message[current] + if hops == 0: + cused += 1 + if current >= biggest_pointer: + raise BadPointer + biggest_pointer = current + hops += 1 + else: + raise BadLabelType + count = message[current] + current += 1 + if hops == 0: + cused += 1 + labels.append('') + return (Name(labels), cused) diff --git a/openpype/vendor/python/python_2/dns/namedict.py b/openpype/vendor/python/python_2/dns/namedict.py new file mode 100644 index 0000000000..37a13104e6 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/namedict.py @@ -0,0 +1,108 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# Copyright (C) 2016 Coresec Systems AB +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND CORESEC SYSTEMS AB DISCLAIMS ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL CORESEC +# SYSTEMS AB BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS name dictionary""" + +import collections +import dns.name +from ._compat import xrange + + +class NameDict(collections.MutableMapping): + """A dictionary whose keys are dns.name.Name objects. + + In addition to being like a regular Python dictionary, this + dictionary can also get the deepest match for a given key. 
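+
+    For example (a minimal sketch; the names and value are placeholders)::
+
+        d = dns.namedict.NameDict()
+        d[dns.name.from_text('dnspython.org.')] = 'value'
+        (k, v) = d.get_deepest_match(
+            dns.name.from_text('www.dnspython.org.'))
+        # k is dnspython.org. and v is 'value'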
+    """
+
+    __slots__ = ["max_depth", "max_depth_items", "__store"]
+
+    def __init__(self, *args, **kwargs):
+        super(NameDict, self).__init__()
+        self.__store = dict()
+        #: the maximum depth of the keys that have ever been added
+        self.max_depth = 0
+        #: the number of items of maximum depth
+        self.max_depth_items = 0
+        self.update(dict(*args, **kwargs))
+
+    def __update_max_depth(self, key):
+        if len(key) == self.max_depth:
+            self.max_depth_items = self.max_depth_items + 1
+        elif len(key) > self.max_depth:
+            self.max_depth = len(key)
+            self.max_depth_items = 1
+
+    def __getitem__(self, key):
+        return self.__store[key]
+
+    def __setitem__(self, key, value):
+        if not isinstance(key, dns.name.Name):
+            raise ValueError('NameDict key must be a name')
+        self.__store[key] = value
+        self.__update_max_depth(key)
+
+    def __delitem__(self, key):
+        self.__store.pop(key)
+        if len(key) == self.max_depth:
+            self.max_depth_items = self.max_depth_items - 1
+            if self.max_depth_items == 0:
+                self.max_depth = 0
+                for k in self.__store:
+                    self.__update_max_depth(k)
+
+    def __iter__(self):
+        return iter(self.__store)
+
+    def __len__(self):
+        return len(self.__store)
+
+    def has_key(self, key):
+        return key in self.__store
+
+    def get_deepest_match(self, name):
+        """Find the deepest match to *name* in the dictionary.
+
+        The deepest match is the longest name in the dictionary which is
+        a superdomain of *name*. Note that *superdomain* includes matching
+        *name* itself.
+
+        *name*, a ``dns.name.Name``, the name to find.
+
+        Returns a ``(key, value)`` tuple where *key* is the deepest
+        ``dns.name.Name``, and *value* is the value associated with *key*.
+        """
+
+        depth = len(name)
+        if depth > self.max_depth:
+            depth = self.max_depth
+        for i in xrange(-depth, 0):
+            n = dns.name.Name(name[i:])
+            if n in self:
+                return (n, self[n])
+        v = self[dns.name.empty]
+        return (dns.name.empty, v)
diff --git a/openpype/vendor/python/python_2/dns/node.py b/openpype/vendor/python/python_2/dns/node.py
new file mode 100644
index 0000000000..8a7f19f523
--- /dev/null
+++ b/openpype/vendor/python/python_2/dns/node.py
@@ -0,0 +1,182 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2001-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""DNS nodes. A node is a set of rdatasets."""
+
+from io import StringIO
+
+import dns.rdataset
+import dns.rdatatype
+import dns.renderer
+
+
+class Node(object):
+
+    """A Node is a set of rdatasets."""
+
+    __slots__ = ['rdatasets']
+
+    def __init__(self):
+        #: the set of rdatasets, represented as a list.
+        self.rdatasets = []
+
+    def to_text(self, name, **kw):
+        """Convert a node to text format.
+
+        Each rdataset at the node is printed. Any keyword arguments
+        to this method are passed on to the rdataset's to_text() method.
+
+        *name*, a ``dns.name.Name`` or ``text``, the owner name of the
+        rdatasets.
+
+        Returns a ``text``.
+        """
+
+        s = StringIO()
+        for rds in self.rdatasets:
+            if len(rds) > 0:
+                s.write(rds.to_text(name, **kw))
+                s.write(u'\n')
+        return s.getvalue()[:-1]
+
+    def __repr__(self):
+        return '<DNS node ' + str(id(self)) + '>'
+
+    def __eq__(self, other):
+        #
+        # This is inefficient. Good thing we don't need to do it much.
+        #
+        for rd in self.rdatasets:
+            if rd not in other.rdatasets:
+                return False
+        for rd in other.rdatasets:
+            if rd not in self.rdatasets:
+                return False
+        return True
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __len__(self):
+        return len(self.rdatasets)
+
+    def __iter__(self):
+        return iter(self.rdatasets)
+
+    def find_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE,
+                      create=False):
+        """Find an rdataset matching the specified properties in the
+        current node.
+
+        *rdclass*, an ``int``, the class of the rdataset.
+
+        *rdtype*, an ``int``, the type of the rdataset.
+
+        *covers*, an ``int``, the covered type. Usually this value is
+        dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or
+        dns.rdatatype.RRSIG, then the covers value will be the rdata
+        type the SIG/RRSIG covers. The library treats the SIG and RRSIG
+        types as if they were a family of types, e.g. RRSIG(A),
+        RRSIG(NS), RRSIG(SOA). This makes RRSIGs much easier to work
+        with than if RRSIGs covering different rdata types were
+        aggregated into a single RRSIG rdataset.
+
+        *create*, a ``bool``. If True, create the rdataset if it is not found.
+
+        Raises ``KeyError`` if an rdataset of the desired type and class does
+        not exist and *create* is not ``True``.
+
+        Returns a ``dns.rdataset.Rdataset``.
+        """
+
+        for rds in self.rdatasets:
+            if rds.match(rdclass, rdtype, covers):
+                return rds
+        if not create:
+            raise KeyError
+        rds = dns.rdataset.Rdataset(rdclass, rdtype)
+        self.rdatasets.append(rds)
+        return rds
+
+    def get_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE,
+                     create=False):
+        """Get an rdataset matching the specified properties in the
+        current node.
+
+        None is returned if an rdataset of the specified type and
+        class does not exist and *create* is not ``True``.
+
+        *rdclass*, an ``int``, the class of the rdataset.
+
+        *rdtype*, an ``int``, the type of the rdataset.
+
+        *covers*, an ``int``, the covered type. Usually this value is
+        dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or
+        dns.rdatatype.RRSIG, then the covers value will be the rdata
+        type the SIG/RRSIG covers. The library treats the SIG and RRSIG
+        types as if they were a family of types, e.g. RRSIG(A),
+        RRSIG(NS), RRSIG(SOA). This makes RRSIGs much easier to work
+        with than if RRSIGs covering different rdata types were
+        aggregated into a single RRSIG rdataset.
+
+        *create*, a ``bool``. If True, create the rdataset if it is not found.
+
+        Returns a ``dns.rdataset.Rdataset`` or ``None``.
+        """
+
+        try:
+            rds = self.find_rdataset(rdclass, rdtype, covers, create)
+        except KeyError:
+            rds = None
+        return rds
+
+    def delete_rdataset(self, rdclass, rdtype, covers=dns.rdatatype.NONE):
+        """Delete the rdataset matching the specified properties in the
+        current node.
+
+        If a matching rdataset does not exist, it is not an error.
+
+        *rdclass*, an ``int``, the class of the rdataset.
+
+        *rdtype*, an ``int``, the type of the rdataset.
+
+        *covers*, an ``int``, the covered type.
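+
+        For example (an illustrative sketch, assuming ``node`` is a
+        populated ``dns.node.Node``), deleting all IN/A rdatas::
+
+            node.delete_rdataset(dns.rdataclass.IN, dns.rdatatype.A)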
+ """ + + rds = self.get_rdataset(rdclass, rdtype, covers) + if rds is not None: + self.rdatasets.remove(rds) + + def replace_rdataset(self, replacement): + """Replace an rdataset. + + It is not an error if there is no rdataset matching *replacement*. + + Ownership of the *replacement* object is transferred to the node; + in other words, this method does not store a copy of *replacement* + at the node, it stores *replacement* itself. + + *replacement*, a ``dns.rdataset.Rdataset``. + + Raises ``ValueError`` if *replacement* is not a + ``dns.rdataset.Rdataset``. + """ + + if not isinstance(replacement, dns.rdataset.Rdataset): + raise ValueError('replacement is not an rdataset') + self.delete_rdataset(replacement.rdclass, replacement.rdtype, + replacement.covers) + self.rdatasets.append(replacement) diff --git a/openpype/vendor/python/python_2/dns/opcode.py b/openpype/vendor/python/python_2/dns/opcode.py new file mode 100644 index 0000000000..c0735ba47b --- /dev/null +++ b/openpype/vendor/python/python_2/dns/opcode.py @@ -0,0 +1,119 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Opcodes.""" + +import dns.exception + +#: Query +QUERY = 0 +#: Inverse Query (historical) +IQUERY = 1 +#: Server Status (unspecified and unimplemented anywhere) +STATUS = 2 +#: Notify +NOTIFY = 4 +#: Dynamic Update +UPDATE = 5 + +_by_text = { + 'QUERY': QUERY, + 'IQUERY': IQUERY, + 'STATUS': STATUS, + 'NOTIFY': NOTIFY, + 'UPDATE': UPDATE +} + +# We construct the inverse mapping programmatically to ensure that we +# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that +# would cause the mapping not to be true inverse. + +_by_value = {y: x for x, y in _by_text.items()} + + +class UnknownOpcode(dns.exception.DNSException): + """An DNS opcode is unknown.""" + + +def from_text(text): + """Convert text into an opcode. + + *text*, a ``text``, the textual opcode + + Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown. + + Returns an ``int``. + """ + + if text.isdigit(): + value = int(text) + if value >= 0 and value <= 15: + return value + value = _by_text.get(text.upper()) + if value is None: + raise UnknownOpcode + return value + + +def from_flags(flags): + """Extract an opcode from DNS message flags. + + *flags*, an ``int``, the DNS flags. + + Returns an ``int``. + """ + + return (flags & 0x7800) >> 11 + + +def to_flags(value): + """Convert an opcode to a value suitable for ORing into DNS message + flags. + + *value*, an ``int``, the DNS opcode value. + + Returns an ``int``. + """ + + return (value << 11) & 0x7800 + + +def to_text(value): + """Convert an opcode to text. + + *value*, an ``int`` the opcode value, + + Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown. 
+ + Returns a ``text``. + """ + + text = _by_value.get(value) + if text is None: + text = str(value) + return text + + +def is_update(flags): + """Is the opcode in flags UPDATE? + + *flags*, an ``int``, the DNS message flags. + + Returns a ``bool``. + """ + + return from_flags(flags) == UPDATE diff --git a/openpype/vendor/python/python_2/dns/py.typed b/openpype/vendor/python/python_2/dns/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openpype/vendor/python/python_2/dns/query.py b/openpype/vendor/python/python_2/dns/query.py new file mode 100644 index 0000000000..c0c517ccd4 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/query.py @@ -0,0 +1,683 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Talk to a DNS server.""" + +from __future__ import generators + +import errno +import select +import socket +import struct +import sys +import time + +import dns.exception +import dns.inet +import dns.name +import dns.message +import dns.rcode +import dns.rdataclass +import dns.rdatatype +from ._compat import long, string_types, PY3 + +if PY3: + select_error = OSError +else: + select_error = select.error + +# Function used to create a socket. Can be overridden if needed in special +# situations. +socket_factory = socket.socket + +class UnexpectedSource(dns.exception.DNSException): + """A DNS query response came from an unexpected address or port.""" + + +class BadResponse(dns.exception.FormError): + """A DNS query response does not respond to the question asked.""" + + +class TransferError(dns.exception.DNSException): + """A zone transfer response got a non-zero rcode.""" + + def __init__(self, rcode): + message = 'Zone transfer error: %s' % dns.rcode.to_text(rcode) + super(TransferError, self).__init__(message) + self.rcode = rcode + + +def _compute_expiration(timeout): + if timeout is None: + return None + else: + return time.time() + timeout + +# This module can use either poll() or select() as the "polling backend". +# +# A backend function takes an fd, bools for readability, writablity, and +# error detection, and a timeout. 
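+#
+# As an illustrative sketch (this is internal API, and the backend is
+# normally selected automatically below), a caller could force the
+# select()-based backend like this:
+#
+#     dns.query._set_polling_backend(dns.query._select_for)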
+ +def _poll_for(fd, readable, writable, error, timeout): + """Poll polling backend.""" + + event_mask = 0 + if readable: + event_mask |= select.POLLIN + if writable: + event_mask |= select.POLLOUT + if error: + event_mask |= select.POLLERR + + pollable = select.poll() + pollable.register(fd, event_mask) + + if timeout: + event_list = pollable.poll(long(timeout * 1000)) + else: + event_list = pollable.poll() + + return bool(event_list) + + +def _select_for(fd, readable, writable, error, timeout): + """Select polling backend.""" + + rset, wset, xset = [], [], [] + + if readable: + rset = [fd] + if writable: + wset = [fd] + if error: + xset = [fd] + + if timeout is None: + (rcount, wcount, xcount) = select.select(rset, wset, xset) + else: + (rcount, wcount, xcount) = select.select(rset, wset, xset, timeout) + + return bool((rcount or wcount or xcount)) + + +def _wait_for(fd, readable, writable, error, expiration): + # Use the selected polling backend to wait for any of the specified + # events. An "expiration" absolute time is converted into a relative + # timeout. + + done = False + while not done: + if expiration is None: + timeout = None + else: + timeout = expiration - time.time() + if timeout <= 0.0: + raise dns.exception.Timeout + try: + if not _polling_backend(fd, readable, writable, error, timeout): + raise dns.exception.Timeout + except select_error as e: + if e.args[0] != errno.EINTR: + raise e + done = True + + +def _set_polling_backend(fn): + # Internal API. Do not use. + + global _polling_backend + + _polling_backend = fn + +if hasattr(select, 'poll'): + # Prefer poll() on platforms that support it because it has no + # limits on the maximum value of a file descriptor (plus it will + # be more efficient for high values). + _polling_backend = _poll_for +else: + _polling_backend = _select_for + + +def _wait_for_readable(s, expiration): + _wait_for(s, True, False, True, expiration) + + +def _wait_for_writable(s, expiration): + _wait_for(s, False, True, True, expiration) + + +def _addresses_equal(af, a1, a2): + # Convert the first value of the tuple, which is a textual format + # address into binary form, so that we are not confused by different + # textual representations of the same address + try: + n1 = dns.inet.inet_pton(af, a1[0]) + n2 = dns.inet.inet_pton(af, a2[0]) + except dns.exception.SyntaxError: + return False + return n1 == n2 and a1[1:] == a2[1:] + + +def _destination_and_source(af, where, port, source, source_port): + # Apply defaults and compute destination and source tuples + # suitable for use in connect(), sendto(), or bind(). + if af is None: + try: + af = dns.inet.af_for_address(where) + except Exception: + af = dns.inet.AF_INET + if af == dns.inet.AF_INET: + destination = (where, port) + if source is not None or source_port != 0: + if source is None: + source = '0.0.0.0' + source = (source, source_port) + elif af == dns.inet.AF_INET6: + destination = (where, port, 0, 0) + if source is not None or source_port != 0: + if source is None: + source = '::' + source = (source, source_port, 0, 0) + return (af, destination, source) + + +def send_udp(sock, what, destination, expiration=None): + """Send a DNS message to the specified UDP socket. + + *sock*, a ``socket``. + + *what*, a ``binary`` or ``dns.message.Message``, the message to send. + + *destination*, a destination tuple appropriate for the address family + of the socket, specifying where to send the query. 
+
+    *expiration*, a ``float`` or ``None``, the absolute time at which
+    a timeout exception should be raised. If ``None``, no timeout will
+    occur.
+
+    Returns an ``(int, float)`` tuple of bytes sent and the sent time.
+    """
+
+    if isinstance(what, dns.message.Message):
+        what = what.to_wire()
+    _wait_for_writable(sock, expiration)
+    sent_time = time.time()
+    n = sock.sendto(what, destination)
+    return (n, sent_time)
+
+
+def receive_udp(sock, destination, expiration=None,
+                ignore_unexpected=False, one_rr_per_rrset=False,
+                keyring=None, request_mac=b'', ignore_trailing=False):
+    """Read a DNS message from a UDP socket.
+
+    *sock*, a ``socket``.
+
+    *destination*, a destination tuple appropriate for the address family
+    of the socket, specifying where the associated query was sent.
+
+    *expiration*, a ``float`` or ``None``, the absolute time at which
+    a timeout exception should be raised. If ``None``, no timeout will
+    occur.
+
+    *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from
+    unexpected sources.
+
+    *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
+    RRset.
+
+    *keyring*, a ``dict``, the keyring to use for TSIG.
+
+    *request_mac*, a ``binary``, the MAC of the request (for TSIG).
+
+    *ignore_trailing*, a ``bool``. If ``True``, ignore trailing
+    junk at end of the received message.
+
+    Raises if the message is malformed, if network errors occur, or if
+    there is a timeout.
+
+    Returns a ``(dns.message.Message, float)`` tuple of the received
+    message and the received time.
+    """
+
+    wire = b''
+    while 1:
+        _wait_for_readable(sock, expiration)
+        (wire, from_address) = sock.recvfrom(65535)
+        if _addresses_equal(sock.family, from_address, destination) or \
+           (dns.inet.is_multicast(destination[0]) and
+                from_address[1:] == destination[1:]):
+            break
+        if not ignore_unexpected:
+            raise UnexpectedSource('got a response from '
+                                   '%s instead of %s' % (from_address,
+                                                         destination))
+    received_time = time.time()
+    r = dns.message.from_wire(wire, keyring=keyring, request_mac=request_mac,
+                              one_rr_per_rrset=one_rr_per_rrset,
+                              ignore_trailing=ignore_trailing)
+    return (r, received_time)
+
+def udp(q, where, timeout=None, port=53, af=None, source=None, source_port=0,
+        ignore_unexpected=False, one_rr_per_rrset=False, ignore_trailing=False):
+    """Return the response obtained after sending a query via UDP.
+
+    *q*, a ``dns.message.Message``, the query to send.
+
+    *where*, a ``text`` containing an IPv4 or IPv6 address, where
+    to send the message.
+
+    *timeout*, a ``float`` or ``None``, the number of seconds to wait before the
+    query times out. If ``None``, the default, wait forever.
+
+    *port*, an ``int``, the port to send the message to. The default is 53.
+
+    *af*, an ``int``, the address family to use. The default is ``None``,
+    which causes the address family to use to be inferred from the form of
+    *where*. If the inference attempt fails, AF_INET is used. This
+    parameter is historical; you need never set it.
+
+    *source*, a ``text`` containing an IPv4 or IPv6 address, specifying
+    the source address. The default is the wildcard address.
+
+    *source_port*, an ``int``, the port from which to send the message.
+    The default is 0.
+
+    *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from
+    unexpected sources.
+
+    *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
+    RRset.
+
+    *ignore_trailing*, a ``bool``. If ``True``, ignore trailing
+    junk at end of the received message.
+
+    Returns a ``dns.message.Message``.
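+
+    For example (an illustrative sketch; requires network access, and
+    192.0.2.53 is a placeholder resolver address)::
+
+        q = dns.message.make_query('dnspython.org', dns.rdatatype.A)
+        r = dns.query.udp(q, '192.0.2.53', timeout=2.0)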
+ """ + + wire = q.to_wire() + (af, destination, source) = _destination_and_source(af, where, port, + source, source_port) + s = socket_factory(af, socket.SOCK_DGRAM, 0) + received_time = None + sent_time = None + try: + expiration = _compute_expiration(timeout) + s.setblocking(0) + if source is not None: + s.bind(source) + (_, sent_time) = send_udp(s, wire, destination, expiration) + (r, received_time) = receive_udp(s, destination, expiration, + ignore_unexpected, one_rr_per_rrset, + q.keyring, q.mac, ignore_trailing) + finally: + if sent_time is None or received_time is None: + response_time = 0 + else: + response_time = received_time - sent_time + s.close() + r.time = response_time + if not q.is_response(r): + raise BadResponse + return r + + +def _net_read(sock, count, expiration): + """Read the specified number of bytes from sock. Keep trying until we + either get the desired amount, or we hit EOF. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + s = b'' + while count > 0: + _wait_for_readable(sock, expiration) + n = sock.recv(count) + if n == b'': + raise EOFError + count = count - len(n) + s = s + n + return s + + +def _net_write(sock, data, expiration): + """Write the specified data to the socket. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + current = 0 + l = len(data) + while current < l: + _wait_for_writable(sock, expiration) + current += sock.send(data[current:]) + + +def send_tcp(sock, what, expiration=None): + """Send a DNS message to the specified TCP socket. + + *sock*, a ``socket``. + + *what*, a ``binary`` or ``dns.message.Message``, the message to send. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + Returns an ``(int, float)`` tuple of bytes sent and the sent time. + """ + + if isinstance(what, dns.message.Message): + what = what.to_wire() + l = len(what) + # copying the wire into tcpmsg is inefficient, but lets us + # avoid writev() or doing a short write that would get pushed + # onto the net + tcpmsg = struct.pack("!H", l) + what + _wait_for_writable(sock, expiration) + sent_time = time.time() + _net_write(sock, tcpmsg, expiration) + return (len(tcpmsg), sent_time) + +def receive_tcp(sock, expiration=None, one_rr_per_rrset=False, + keyring=None, request_mac=b'', ignore_trailing=False): + """Read a DNS message from a TCP socket. + + *sock*, a ``socket``. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *keyring*, a ``dict``, the keyring to use for TSIG. + + *request_mac*, a ``binary``, the MAC of the request (for TSIG). + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + Raises if the message is malformed, if network errors occur, of if + there is a timeout. + + Returns a ``dns.message.Message`` object. 
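+
+    For example (a minimal sketch, assuming *sock* is a connected TCP
+    socket over which a query was just sent with ``send_tcp``)::
+
+        (response, received_time) = dns.query.receive_tcp(sock, expiration)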
+    """
+
+    ldata = _net_read(sock, 2, expiration)
+    (l,) = struct.unpack("!H", ldata)
+    wire = _net_read(sock, l, expiration)
+    received_time = time.time()
+    r = dns.message.from_wire(wire, keyring=keyring, request_mac=request_mac,
+                              one_rr_per_rrset=one_rr_per_rrset,
+                              ignore_trailing=ignore_trailing)
+    return (r, received_time)
+
+def _connect(s, address):
+    try:
+        s.connect(address)
+    except socket.error:
+        (ty, v) = sys.exc_info()[:2]
+
+        if hasattr(v, 'errno'):
+            v_err = v.errno
+        else:
+            v_err = v[0]
+        if v_err not in [errno.EINPROGRESS, errno.EWOULDBLOCK, errno.EALREADY]:
+            raise v
+
+
+def tcp(q, where, timeout=None, port=53, af=None, source=None, source_port=0,
+        one_rr_per_rrset=False, ignore_trailing=False):
+    """Return the response obtained after sending a query via TCP.
+
+    *q*, a ``dns.message.Message``, the query to send.
+
+    *where*, a ``text`` containing an IPv4 or IPv6 address, where
+    to send the message.
+
+    *timeout*, a ``float`` or ``None``, the number of seconds to wait before the
+    query times out. If ``None``, the default, wait forever.
+
+    *port*, an ``int``, the port to send the message to. The default is 53.
+
+    *af*, an ``int``, the address family to use. The default is ``None``,
+    which causes the address family to use to be inferred from the form of
+    *where*. If the inference attempt fails, AF_INET is used. This
+    parameter is historical; you need never set it.
+
+    *source*, a ``text`` containing an IPv4 or IPv6 address, specifying
+    the source address. The default is the wildcard address.
+
+    *source_port*, an ``int``, the port from which to send the message.
+    The default is 0.
+
+    *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
+    RRset.
+
+    *ignore_trailing*, a ``bool``. If ``True``, ignore trailing
+    junk at end of the received message.
+
+    Returns a ``dns.message.Message``.
+    """
+
+    wire = q.to_wire()
+    (af, destination, source) = _destination_and_source(af, where, port,
+                                                        source, source_port)
+    s = socket_factory(af, socket.SOCK_STREAM, 0)
+    begin_time = None
+    received_time = None
+    try:
+        expiration = _compute_expiration(timeout)
+        s.setblocking(0)
+        begin_time = time.time()
+        if source is not None:
+            s.bind(source)
+        _connect(s, destination)
+        send_tcp(s, wire, expiration)
+        (r, received_time) = receive_tcp(s, expiration, one_rr_per_rrset,
+                                         q.keyring, q.mac, ignore_trailing)
+    finally:
+        if begin_time is None or received_time is None:
+            response_time = 0
+        else:
+            response_time = received_time - begin_time
+        s.close()
+    r.time = response_time
+    if not q.is_response(r):
+        raise BadResponse
+    return r
+
+
+def xfr(where, zone, rdtype=dns.rdatatype.AXFR, rdclass=dns.rdataclass.IN,
+        timeout=None, port=53, keyring=None, keyname=None, relativize=True,
+        af=None, lifetime=None, source=None, source_port=0, serial=0,
+        use_udp=False, keyalgorithm=dns.tsig.default_algorithm):
+    """Return a generator for the responses to a zone transfer.
+
+    *where*, a ``text`` containing an IPv4 or IPv6 address, where
+    to send the message.
+
+    *zone*, a ``dns.name.Name`` or ``text``, the name of the zone to transfer.
+
+    *rdtype*, an ``int`` or ``text``, the type of zone transfer. The
+    default is ``dns.rdatatype.AXFR``. ``dns.rdatatype.IXFR`` can be
+    used to do an incremental transfer instead.
+
+    *rdclass*, an ``int`` or ``text``, the class of the zone transfer.
+    The default is ``dns.rdataclass.IN``.
+
+    *timeout*, a ``float``, the number of seconds to wait for each
+    response message. If None, the default, wait forever.
+ + *port*, an ``int``, the port send the message to. The default is 53. + + *keyring*, a ``dict``, the keyring to use for TSIG. + + *keyname*, a ``dns.name.Name`` or ``text``, the name of the TSIG + key to use. + + *relativize*, a ``bool``. If ``True``, all names in the zone will be + relativized to the zone origin. It is essential that the + relativize setting matches the one specified to + ``dns.zone.from_xfr()`` if using this generator to make a zone. + + *af*, an ``int``, the address family to use. The default is ``None``, + which causes the address family to use to be inferred from the form of + *where*. If the inference attempt fails, AF_INET is used. This + parameter is historical; you need never set it. + + *lifetime*, a ``float``, the total number of seconds to spend + doing the transfer. If ``None``, the default, then there is no + limit on the time the transfer may take. + + *source*, a ``text`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *serial*, an ``int``, the SOA serial number to use as the base for + an IXFR diff sequence (only meaningful if *rdtype* is + ``dns.rdatatype.IXFR``). + + *use_udp*, a ``bool``. If ``True``, use UDP (only meaningful for IXFR). + + *keyalgorithm*, a ``dns.name.Name`` or ``text``, the TSIG algorithm to use. + + Raises on errors, and so does the generator. + + Returns a generator of ``dns.message.Message`` objects. + """ + + if isinstance(zone, string_types): + zone = dns.name.from_text(zone) + if isinstance(rdtype, string_types): + rdtype = dns.rdatatype.from_text(rdtype) + q = dns.message.make_query(zone, rdtype, rdclass) + if rdtype == dns.rdatatype.IXFR: + rrset = dns.rrset.from_text(zone, 0, 'IN', 'SOA', + '. . 
%u 0 0 0 0' % serial) + q.authority.append(rrset) + if keyring is not None: + q.use_tsig(keyring, keyname, algorithm=keyalgorithm) + wire = q.to_wire() + (af, destination, source) = _destination_and_source(af, where, port, + source, source_port) + if use_udp: + if rdtype != dns.rdatatype.IXFR: + raise ValueError('cannot do a UDP AXFR') + s = socket_factory(af, socket.SOCK_DGRAM, 0) + else: + s = socket_factory(af, socket.SOCK_STREAM, 0) + s.setblocking(0) + if source is not None: + s.bind(source) + expiration = _compute_expiration(lifetime) + _connect(s, destination) + l = len(wire) + if use_udp: + _wait_for_writable(s, expiration) + s.send(wire) + else: + tcpmsg = struct.pack("!H", l) + wire + _net_write(s, tcpmsg, expiration) + done = False + delete_mode = True + expecting_SOA = False + soa_rrset = None + if relativize: + origin = zone + oname = dns.name.empty + else: + origin = None + oname = zone + tsig_ctx = None + first = True + while not done: + mexpiration = _compute_expiration(timeout) + if mexpiration is None or mexpiration > expiration: + mexpiration = expiration + if use_udp: + _wait_for_readable(s, expiration) + (wire, from_address) = s.recvfrom(65535) + else: + ldata = _net_read(s, 2, mexpiration) + (l,) = struct.unpack("!H", ldata) + wire = _net_read(s, l, mexpiration) + is_ixfr = (rdtype == dns.rdatatype.IXFR) + r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac, + xfr=True, origin=origin, tsig_ctx=tsig_ctx, + multi=True, first=first, + one_rr_per_rrset=is_ixfr) + rcode = r.rcode() + if rcode != dns.rcode.NOERROR: + raise TransferError(rcode) + tsig_ctx = r.tsig_ctx + first = False + answer_index = 0 + if soa_rrset is None: + if not r.answer or r.answer[0].name != oname: + raise dns.exception.FormError( + "No answer or RRset not for qname") + rrset = r.answer[0] + if rrset.rdtype != dns.rdatatype.SOA: + raise dns.exception.FormError("first RRset is not an SOA") + answer_index = 1 + soa_rrset = rrset.copy() + if rdtype == dns.rdatatype.IXFR: + if soa_rrset[0].serial <= serial: + # + # We're already up-to-date. + # + done = True + else: + expecting_SOA = True + # + # Process SOAs in the answer section (other than the initial + # SOA in the first message). + # + for rrset in r.answer[answer_index:]: + if done: + raise dns.exception.FormError("answers after final SOA") + if rrset.rdtype == dns.rdatatype.SOA and rrset.name == oname: + if expecting_SOA: + if rrset[0].serial != serial: + raise dns.exception.FormError( + "IXFR base serial mismatch") + expecting_SOA = False + elif rdtype == dns.rdatatype.IXFR: + delete_mode = not delete_mode + # + # If this SOA RRset is equal to the first we saw then we're + # finished. If this is an IXFR we also check that we're seeing + # the record in the expected part of the response. + # + if rrset == soa_rrset and \ + (rdtype == dns.rdatatype.AXFR or + (rdtype == dns.rdatatype.IXFR and delete_mode)): + done = True + elif expecting_SOA: + # + # We made an IXFR request and are expecting another + # SOA RR, but saw something else, so this must be an + # AXFR response. 
+ # + rdtype = dns.rdatatype.AXFR + expecting_SOA = False + if done and q.keyring and not r.had_tsig: + raise dns.exception.FormError("missing TSIG") + yield r + s.close() diff --git a/openpype/vendor/python/python_2/dns/rcode.py b/openpype/vendor/python/python_2/dns/rcode.py new file mode 100644 index 0000000000..5191e1b18c --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rcode.py @@ -0,0 +1,144 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Result Codes.""" + +import dns.exception +from ._compat import long + +#: No error +NOERROR = 0 +#: Form error +FORMERR = 1 +#: Server failure +SERVFAIL = 2 +#: Name does not exist ("Name Error" in RFC 1025 terminology). +NXDOMAIN = 3 +#: Not implemented +NOTIMP = 4 +#: Refused +REFUSED = 5 +#: Name exists. +YXDOMAIN = 6 +#: RRset exists. +YXRRSET = 7 +#: RRset does not exist. +NXRRSET = 8 +#: Not authoritative. +NOTAUTH = 9 +#: Name not in zone. +NOTZONE = 10 +#: Bad EDNS version. +BADVERS = 16 + +_by_text = { + 'NOERROR': NOERROR, + 'FORMERR': FORMERR, + 'SERVFAIL': SERVFAIL, + 'NXDOMAIN': NXDOMAIN, + 'NOTIMP': NOTIMP, + 'REFUSED': REFUSED, + 'YXDOMAIN': YXDOMAIN, + 'YXRRSET': YXRRSET, + 'NXRRSET': NXRRSET, + 'NOTAUTH': NOTAUTH, + 'NOTZONE': NOTZONE, + 'BADVERS': BADVERS +} + +# We construct the inverse mapping programmatically to ensure that we +# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that +# would cause the mapping not to be a true inverse. + +_by_value = {y: x for x, y in _by_text.items()} + + +class UnknownRcode(dns.exception.DNSException): + """A DNS rcode is unknown.""" + + +def from_text(text): + """Convert text into an rcode. + + *text*, a ``text``, the textual rcode or an integer in textual form. + + Raises ``dns.rcode.UnknownRcode`` if the rcode mnemonic is unknown. + + Returns an ``int``. + """ + + if text.isdigit(): + v = int(text) + if v >= 0 and v <= 4095: + return v + v = _by_text.get(text.upper()) + if v is None: + raise UnknownRcode + return v + + +def from_flags(flags, ednsflags): + """Return the rcode value encoded by flags and ednsflags. + + *flags*, an ``int``, the DNS flags field. + + *ednsflags*, an ``int``, the EDNS flags field. + + Raises ``ValueError`` if rcode is < 0 or > 4095 + + Returns an ``int``. + """ + + value = (flags & 0x000f) | ((ednsflags >> 20) & 0xff0) + if value < 0 or value > 4095: + raise ValueError('rcode must be >= 0 and <= 4095') + return value + + +def to_flags(value): + """Return a (flags, ednsflags) tuple which encodes the rcode. + + *value*, an ``int``, the rcode. + + Raises ``ValueError`` if rcode is < 0 or > 4095. + + Returns an ``(int, int)`` tuple. 
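+
+    For example (an illustrative computation), ``BADVERS`` (16) has its
+    low four bits placed in the DNS flags and the remainder in the EDNS
+    flags::
+
+        (v, ev) = dns.rcode.to_flags(dns.rcode.BADVERS)
+        # (v, ev) == (0, 0x1000000), and from_flags(v, ev) recovers 16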
+ """ + + if value < 0 or value > 4095: + raise ValueError('rcode must be >= 0 and <= 4095') + v = value & 0xf + ev = long(value & 0xff0) << 20 + return (v, ev) + + +def to_text(value): + """Convert rcode into text. + + *value*, and ``int``, the rcode. + + Raises ``ValueError`` if rcode is < 0 or > 4095. + + Returns a ``text``. + """ + + if value < 0 or value > 4095: + raise ValueError('rcode must be >= 0 and <= 4095') + text = _by_value.get(value) + if text is None: + text = str(value) + return text diff --git a/openpype/vendor/python/python_2/dns/rdata.py b/openpype/vendor/python/python_2/dns/rdata.py new file mode 100644 index 0000000000..ea1971dc5f --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdata.py @@ -0,0 +1,456 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS rdata.""" + +from io import BytesIO +import base64 +import binascii + +import dns.exception +import dns.name +import dns.rdataclass +import dns.rdatatype +import dns.tokenizer +import dns.wiredata +from ._compat import xrange, string_types, text_type + +try: + import threading as _threading +except ImportError: + import dummy_threading as _threading + +_hex_chunksize = 32 + + +def _hexify(data, chunksize=_hex_chunksize): + """Convert a binary string into its hex encoding, broken up into chunks + of chunksize characters separated by a space. + """ + + line = binascii.hexlify(data) + return b' '.join([line[i:i + chunksize] + for i + in range(0, len(line), chunksize)]).decode() + +_base64_chunksize = 32 + + +def _base64ify(data, chunksize=_base64_chunksize): + """Convert a binary string into its base64 encoding, broken up into chunks + of chunksize characters separated by a space. + """ + + line = base64.b64encode(data) + return b' '.join([line[i:i + chunksize] + for i + in range(0, len(line), chunksize)]).decode() + +__escaped = bytearray(b'"\\') + +def _escapify(qstring): + """Escape the characters in a quoted string which need it.""" + + if isinstance(qstring, text_type): + qstring = qstring.encode() + if not isinstance(qstring, bytearray): + qstring = bytearray(qstring) + + text = '' + for c in qstring: + if c in __escaped: + text += '\\' + chr(c) + elif c >= 0x20 and c < 0x7F: + text += chr(c) + else: + text += '\\%03d' % c + return text + + +def _truncate_bitmap(what): + """Determine the index of greatest byte that isn't all zeros, and + return the bitmap that contains all the bytes less than that index. + """ + + for i in xrange(len(what) - 1, -1, -1): + if what[i] != 0: + return what[0: i + 1] + return what[0:1] + + +class Rdata(object): + """Base class for all DNS rdata types.""" + + __slots__ = ['rdclass', 'rdtype'] + + def __init__(self, rdclass, rdtype): + """Initialize an rdata. 
+
+        *rdclass*, an ``int``, is the rdataclass of the Rdata.
+
+        *rdtype*, an ``int``, is the rdatatype of the Rdata.
+        """
+
+        self.rdclass = rdclass
+        self.rdtype = rdtype
+
+    def covers(self):
+        """Return the type a Rdata covers.
+
+        DNS SIG/RRSIG rdatas apply to a specific type; this type is
+        returned by the covers() function. If the rdata type is not
+        SIG or RRSIG, dns.rdatatype.NONE is returned. This is useful when
+        creating rdatasets, allowing the rdataset to contain only RRSIGs
+        of a particular type, e.g. RRSIG(NS).
+
+        Returns an ``int``.
+        """
+
+        return dns.rdatatype.NONE
+
+    def extended_rdatatype(self):
+        """Return a 32-bit type value, the least significant 16 bits of
+        which are the ordinary DNS type, and the upper 16 bits of which are
+        the "covered" type, if any.
+
+        Returns an ``int``.
+        """
+
+        return self.covers() << 16 | self.rdtype
+
+    def to_text(self, origin=None, relativize=True, **kw):
+        """Convert an rdata to text format.
+
+        Returns a ``text``.
+        """
+
+        raise NotImplementedError
+
+    def to_wire(self, file, compress=None, origin=None):
+        """Convert an rdata to wire format.
+
+        Returns a ``binary``.
+        """
+
+        raise NotImplementedError
+
+    def to_digestable(self, origin=None):
+        """Convert rdata to a format suitable for digesting in hashes. This
+        is also the DNSSEC canonical form.
+
+        Returns a ``binary``.
+        """
+
+        f = BytesIO()
+        self.to_wire(f, None, origin)
+        return f.getvalue()
+
+    def validate(self):
+        """Check that the current contents of the rdata's fields are
+        valid.
+
+        If you change an rdata by assigning to its fields,
+        it is a good idea to call validate() when you are done making
+        changes.
+
+        Raises various exceptions if there are problems.
+
+        Returns ``None``.
+        """
+
+        dns.rdata.from_text(self.rdclass, self.rdtype, self.to_text())
+
+    def __repr__(self):
+        covers = self.covers()
+        if covers == dns.rdatatype.NONE:
+            ctext = ''
+        else:
+            ctext = '(' + dns.rdatatype.to_text(covers) + ')'
+        return '<DNS ' + dns.rdataclass.to_text(self.rdclass) + ' ' + \
+               dns.rdatatype.to_text(self.rdtype) + ctext + ' rdata: ' + \
+               str(self) + '>'
+
+    def __str__(self):
+        return self.to_text()
+
+    def _cmp(self, other):
+        """Compare an rdata with another rdata of the same rdtype and
+        rdclass.
+
+        Return < 0 if self < other in the DNSSEC ordering, 0 if self
+        == other, and > 0 if self > other.
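+
+        For example (an illustrative sketch), two A rdatas compare by
+        their canonical wire form, so 10.0.0.1 orders before 10.0.0.2::
+
+            a1 = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A,
+                                     '10.0.0.1')
+            a2 = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A,
+                                     '10.0.0.2')
+            # a1._cmp(a2) == -1, since 0a000001 < 0a000002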
+ + """ + our = self.to_digestable(dns.name.root) + their = other.to_digestable(dns.name.root) + if our == their: + return 0 + elif our > their: + return 1 + else: + return -1 + + def __eq__(self, other): + if not isinstance(other, Rdata): + return False + if self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return False + return self._cmp(other) == 0 + + def __ne__(self, other): + if not isinstance(other, Rdata): + return True + if self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return True + return self._cmp(other) != 0 + + def __lt__(self, other): + if not isinstance(other, Rdata) or \ + self.rdclass != other.rdclass or self.rdtype != other.rdtype: + + return NotImplemented + return self._cmp(other) < 0 + + def __le__(self, other): + if not isinstance(other, Rdata) or \ + self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return NotImplemented + return self._cmp(other) <= 0 + + def __ge__(self, other): + if not isinstance(other, Rdata) or \ + self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return NotImplemented + return self._cmp(other) >= 0 + + def __gt__(self, other): + if not isinstance(other, Rdata) or \ + self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return NotImplemented + return self._cmp(other) > 0 + + def __hash__(self): + return hash(self.to_digestable(dns.name.root)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + raise NotImplementedError + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + raise NotImplementedError + + def choose_relativity(self, origin=None, relativize=True): + """Convert any domain names in the rdata to the specified + relativization. + """ + +class GenericRdata(Rdata): + + """Generic Rdata Class + + This class is used for rdata types for which we have no better + implementation. It implements the DNS "unknown RRs" scheme. 
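+
+    For example (an illustrative sketch of the generic RFC 3597 syntax;
+    the type number 999 is arbitrary)::
+
+        rd = dns.rdata.from_text(dns.rdataclass.IN, 999, r'\# 4 0a000001')
+        # rd is a GenericRdata whose data is the four bytes 0a 00 00 01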
+ """ + + __slots__ = ['data'] + + def __init__(self, rdclass, rdtype, data): + super(GenericRdata, self).__init__(rdclass, rdtype) + self.data = data + + def to_text(self, origin=None, relativize=True, **kw): + return r'\# %d ' % len(self.data) + _hexify(self.data) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + token = tok.get() + if not token.is_identifier() or token.value != r'\#': + raise dns.exception.SyntaxError( + r'generic rdata does not start with \#') + length = tok.get_int() + chunks = [] + while 1: + token = tok.get() + if token.is_eol_or_eof(): + break + chunks.append(token.value.encode()) + hex = b''.join(chunks) + data = binascii.unhexlify(hex) + if len(data) != length: + raise dns.exception.SyntaxError( + 'generic rdata hex data has wrong length') + return cls(rdclass, rdtype, data) + + def to_wire(self, file, compress=None, origin=None): + file.write(self.data) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + return cls(rdclass, rdtype, wire[current: current + rdlen]) + +_rdata_modules = {} +_module_prefix = 'dns.rdtypes' +_import_lock = _threading.Lock() + +def get_rdata_class(rdclass, rdtype): + + def import_module(name): + with _import_lock: + mod = __import__(name) + components = name.split('.') + for comp in components[1:]: + mod = getattr(mod, comp) + return mod + + mod = _rdata_modules.get((rdclass, rdtype)) + rdclass_text = dns.rdataclass.to_text(rdclass) + rdtype_text = dns.rdatatype.to_text(rdtype) + rdtype_text = rdtype_text.replace('-', '_') + if not mod: + mod = _rdata_modules.get((dns.rdatatype.ANY, rdtype)) + if not mod: + try: + mod = import_module('.'.join([_module_prefix, + rdclass_text, rdtype_text])) + _rdata_modules[(rdclass, rdtype)] = mod + except ImportError: + try: + mod = import_module('.'.join([_module_prefix, + 'ANY', rdtype_text])) + _rdata_modules[(dns.rdataclass.ANY, rdtype)] = mod + except ImportError: + mod = None + if mod: + cls = getattr(mod, rdtype_text) + else: + cls = GenericRdata + return cls + + +def from_text(rdclass, rdtype, tok, origin=None, relativize=True): + """Build an rdata object from text format. + + This function attempts to dynamically load a class which + implements the specified rdata class and type. If there is no + class-and-type-specific implementation, the GenericRdata class + is used. + + Once a class is chosen, its from_text() class method is called + with the parameters to this function. + + If *tok* is a ``text``, then a tokenizer is created and the string + is used as its input. + + *rdclass*, an ``int``, the rdataclass. + + *rdtype*, an ``int``, the rdatatype. + + *tok*, a ``dns.tokenizer.Tokenizer`` or a ``text``. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized to + the specified origin. + + Returns an instance of the chosen Rdata subclass. + """ + + if isinstance(tok, string_types): + tok = dns.tokenizer.Tokenizer(tok) + cls = get_rdata_class(rdclass, rdtype) + if cls != GenericRdata: + # peek at first token + token = tok.get() + tok.unget(token) + if token.is_identifier() and \ + token.value == r'\#': + # + # Known type using the generic syntax. Extract the + # wire form from the generic syntax, and then run + # from_wire on it. 
+ # + rdata = GenericRdata.from_text(rdclass, rdtype, tok, origin, + relativize) + return from_wire(rdclass, rdtype, rdata.data, 0, len(rdata.data), + origin) + return cls.from_text(rdclass, rdtype, tok, origin, relativize) + + +def from_wire(rdclass, rdtype, wire, current, rdlen, origin=None): + """Build an rdata object from wire format + + This function attempts to dynamically load a class which + implements the specified rdata class and type. If there is no + class-and-type-specific implementation, the GenericRdata class + is used. + + Once a class is chosen, its from_wire() class method is called + with the parameters to this function. + + *rdclass*, an ``int``, the rdataclass. + + *rdtype*, an ``int``, the rdatatype. + + *wire*, a ``binary``, the wire-format message. + + *current*, an ``int``, the offset in wire of the beginning of + the rdata. + + *rdlen*, an ``int``, the length of the wire-format rdata + + *origin*, a ``dns.name.Name`` (or ``None``). If not ``None``, + then names will be relativized to this origin. + + Returns an instance of the chosen Rdata subclass. + """ + + wire = dns.wiredata.maybe_wrap(wire) + cls = get_rdata_class(rdclass, rdtype) + return cls.from_wire(rdclass, rdtype, wire, current, rdlen, origin) + + +class RdatatypeExists(dns.exception.DNSException): + """DNS rdatatype already exists.""" + supp_kwargs = {'rdclass', 'rdtype'} + fmt = "The rdata type with class {rdclass} and rdtype {rdtype} " + \ + "already exists." + + +def register_type(implementation, rdtype, rdtype_text, is_singleton=False, + rdclass=dns.rdataclass.IN): + """Dynamically register a module to handle an rdatatype. + + *implementation*, a module implementing the type in the usual dnspython + way. + + *rdtype*, an ``int``, the rdatatype to register. + + *rdtype_text*, a ``text``, the textual form of the rdatatype. + + *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. + RRsets of the type can have only one member.) + + *rdclass*, the rdataclass of the type, or ``dns.rdataclass.ANY`` if + it applies to all classes. + """ + + existing_cls = get_rdata_class(rdclass, rdtype) + if existing_cls != GenericRdata: + raise RdatatypeExists(rdclass=rdclass, rdtype=rdtype) + _rdata_modules[(rdclass, rdtype)] = implementation + dns.rdatatype.register_type(rdtype, rdtype_text, is_singleton) diff --git a/openpype/vendor/python/python_2/dns/rdataclass.py b/openpype/vendor/python/python_2/dns/rdataclass.py new file mode 100644 index 0000000000..b88aa85b7b --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdataclass.py @@ -0,0 +1,122 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""DNS Rdata Classes.""" + +import re + +import dns.exception + +RESERVED0 = 0 +IN = 1 +CH = 3 +HS = 4 +NONE = 254 +ANY = 255 + +_by_text = { + 'RESERVED0': RESERVED0, + 'IN': IN, + 'CH': CH, + 'HS': HS, + 'NONE': NONE, + 'ANY': ANY +} + +# We construct the inverse mapping programmatically to ensure that we +# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that +# would cause the mapping not to be true inverse. + +_by_value = {y: x for x, y in _by_text.items()} + +# Now that we've built the inverse map, we can add class aliases to +# the _by_text mapping. + +_by_text.update({ + 'INTERNET': IN, + 'CHAOS': CH, + 'HESIOD': HS +}) + +_metaclasses = { + NONE: True, + ANY: True +} + +_unknown_class_pattern = re.compile('CLASS([0-9]+)$', re.I) + + +class UnknownRdataclass(dns.exception.DNSException): + """A DNS class is unknown.""" + + +def from_text(text): + """Convert text into a DNS rdata class value. + + The input text can be a defined DNS RR class mnemonic or + instance of the DNS generic class syntax. + + For example, "IN" and "CLASS1" will both result in a value of 1. + + Raises ``dns.rdatatype.UnknownRdataclass`` if the class is unknown. + + Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. + + Returns an ``int``. + """ + + value = _by_text.get(text.upper()) + if value is None: + match = _unknown_class_pattern.match(text) + if match is None: + raise UnknownRdataclass + value = int(match.group(1)) + if value < 0 or value > 65535: + raise ValueError("class must be between >= 0 and <= 65535") + return value + + +def to_text(value): + """Convert a DNS rdata type value to text. + + If the value has a known mnemonic, it will be used, otherwise the + DNS generic class syntax will be used. + + Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. + + Returns a ``str``. + """ + + if value < 0 or value > 65535: + raise ValueError("class must be between >= 0 and <= 65535") + text = _by_value.get(value) + if text is None: + text = 'CLASS' + repr(value) + return text + + +def is_metaclass(rdclass): + """True if the specified class is a metaclass. + + The currently defined metaclasses are ANY and NONE. + + *rdclass* is an ``int``. + """ + + if rdclass in _metaclasses: + return True + return False diff --git a/openpype/vendor/python/python_2/dns/rdataset.py b/openpype/vendor/python/python_2/dns/rdataset.py new file mode 100644 index 0000000000..f1afe24198 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdataset.py @@ -0,0 +1,347 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
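+# A brief sketch of the TTL-minimization behaviour of the Rdataset class
+# defined in this module (comment only; assumes the standard A record
+# implementation is importable, and the addresses are illustrative):
+#
+#     >>> import dns.rdata, dns.rdataset
+#     >>> rds = dns.rdataset.from_text('IN', 'A', 300, '10.0.0.1')
+#     >>> rds.ttl
+#     300
+#     >>> rd = dns.rdata.from_text(rds.rdclass, rds.rdtype, '10.0.0.2')
+#     >>> rds.add(rd, ttl=60)    # update_ttl(60) runs before the add
+#     >>> rds.ttl
+#     60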
+ +"""DNS rdatasets (an rdataset is a set of rdatas of a given type and class)""" + +import random +from io import StringIO +import struct + +import dns.exception +import dns.rdatatype +import dns.rdataclass +import dns.rdata +import dns.set +from ._compat import string_types + +# define SimpleSet here for backwards compatibility +SimpleSet = dns.set.Set + + +class DifferingCovers(dns.exception.DNSException): + """An attempt was made to add a DNS SIG/RRSIG whose covered type + is not the same as that of the other rdatas in the rdataset.""" + + +class IncompatibleTypes(dns.exception.DNSException): + """An attempt was made to add DNS RR data of an incompatible type.""" + + +class Rdataset(dns.set.Set): + + """A DNS rdataset.""" + + __slots__ = ['rdclass', 'rdtype', 'covers', 'ttl'] + + def __init__(self, rdclass, rdtype, covers=dns.rdatatype.NONE, ttl=0): + """Create a new rdataset of the specified class and type. + + *rdclass*, an ``int``, the rdataclass. + + *rdtype*, an ``int``, the rdatatype. + + *covers*, an ``int``, the covered rdatatype. + + *ttl*, an ``int``, the TTL. + """ + + super(Rdataset, self).__init__() + self.rdclass = rdclass + self.rdtype = rdtype + self.covers = covers + self.ttl = ttl + + def _clone(self): + obj = super(Rdataset, self)._clone() + obj.rdclass = self.rdclass + obj.rdtype = self.rdtype + obj.covers = self.covers + obj.ttl = self.ttl + return obj + + def update_ttl(self, ttl): + """Perform TTL minimization. + + Set the TTL of the rdataset to be the lesser of the set's current + TTL or the specified TTL. If the set contains no rdatas, set the TTL + to the specified TTL. + + *ttl*, an ``int``. + """ + + if len(self) == 0: + self.ttl = ttl + elif ttl < self.ttl: + self.ttl = ttl + + def add(self, rd, ttl=None): + """Add the specified rdata to the rdataset. + + If the optional *ttl* parameter is supplied, then + ``self.update_ttl(ttl)`` will be called prior to adding the rdata. + + *rd*, a ``dns.rdata.Rdata``, the rdata + + *ttl*, an ``int``, the TTL. + + Raises ``dns.rdataset.IncompatibleTypes`` if the type and class + do not match the type and class of the rdataset. + + Raises ``dns.rdataset.DifferingCovers`` if the type is a signature + type and the covered type does not match that of the rdataset. + """ + + # + # If we're adding a signature, do some special handling to + # check that the signature covers the same type as the + # other rdatas in this rdataset. If this is the first rdata + # in the set, initialize the covers field. + # + if self.rdclass != rd.rdclass or self.rdtype != rd.rdtype: + raise IncompatibleTypes + if ttl is not None: + self.update_ttl(ttl) + if self.rdtype == dns.rdatatype.RRSIG or \ + self.rdtype == dns.rdatatype.SIG: + covers = rd.covers() + if len(self) == 0 and self.covers == dns.rdatatype.NONE: + self.covers = covers + elif self.covers != covers: + raise DifferingCovers + if dns.rdatatype.is_singleton(rd.rdtype) and len(self) > 0: + self.clear() + super(Rdataset, self).add(rd) + + def union_update(self, other): + self.update_ttl(other.ttl) + super(Rdataset, self).union_update(other) + + def intersection_update(self, other): + self.update_ttl(other.ttl) + super(Rdataset, self).intersection_update(other) + + def update(self, other): + """Add all rdatas in other to self. + + *other*, a ``dns.rdataset.Rdataset``, the rdataset from which + to update. 
+ """ + + self.update_ttl(other.ttl) + super(Rdataset, self).update(other) + + def __repr__(self): + if self.covers == 0: + ctext = '' + else: + ctext = '(' + dns.rdatatype.to_text(self.covers) + ')' + return '' + + def __str__(self): + return self.to_text() + + def __eq__(self, other): + if not isinstance(other, Rdataset): + return False + if self.rdclass != other.rdclass or \ + self.rdtype != other.rdtype or \ + self.covers != other.covers: + return False + return super(Rdataset, self).__eq__(other) + + def __ne__(self, other): + return not self.__eq__(other) + + def to_text(self, name=None, origin=None, relativize=True, + override_rdclass=None, **kw): + """Convert the rdataset into DNS master file format. + + See ``dns.name.Name.choose_relativity`` for more information + on how *origin* and *relativize* determine the way names + are emitted. + + Any additional keyword arguments are passed on to the rdata + ``to_text()`` method. + + *name*, a ``dns.name.Name``. If name is not ``None``, emit RRs with + *name* as the owner name. + + *origin*, a ``dns.name.Name`` or ``None``, the origin for relative + names. + + *relativize*, a ``bool``. If ``True``, names will be relativized + to *origin*. + """ + + if name is not None: + name = name.choose_relativity(origin, relativize) + ntext = str(name) + pad = ' ' + else: + ntext = '' + pad = '' + s = StringIO() + if override_rdclass is not None: + rdclass = override_rdclass + else: + rdclass = self.rdclass + if len(self) == 0: + # + # Empty rdatasets are used for the question section, and in + # some dynamic updates, so we don't need to print out the TTL + # (which is meaningless anyway). + # + s.write(u'{}{}{} {}\n'.format(ntext, pad, + dns.rdataclass.to_text(rdclass), + dns.rdatatype.to_text(self.rdtype))) + else: + for rd in self: + s.write(u'%s%s%d %s %s %s\n' % + (ntext, pad, self.ttl, dns.rdataclass.to_text(rdclass), + dns.rdatatype.to_text(self.rdtype), + rd.to_text(origin=origin, relativize=relativize, + **kw))) + # + # We strip off the final \n for the caller's convenience in printing + # + return s.getvalue()[:-1] + + def to_wire(self, name, file, compress=None, origin=None, + override_rdclass=None, want_shuffle=True): + """Convert the rdataset to wire format. + + *name*, a ``dns.name.Name`` is the owner name to use. + + *file* is the file where the name is emitted (typically a + BytesIO file). + + *compress*, a ``dict``, is the compression table to use. If + ``None`` (the default), names will not be compressed. + + *origin* is a ``dns.name.Name`` or ``None``. If the name is + relative and origin is not ``None``, then *origin* will be appended + to it. + + *override_rdclass*, an ``int``, is used as the class instead of the + class of the rdataset. This is useful when rendering rdatasets + associated with dynamic updates. + + *want_shuffle*, a ``bool``. If ``True``, then the order of the + Rdatas within the Rdataset will be shuffled before rendering. + + Returns an ``int``, the number of records emitted. 
+ """ + + if override_rdclass is not None: + rdclass = override_rdclass + want_shuffle = False + else: + rdclass = self.rdclass + file.seek(0, 2) + if len(self) == 0: + name.to_wire(file, compress, origin) + stuff = struct.pack("!HHIH", self.rdtype, rdclass, 0, 0) + file.write(stuff) + return 1 + else: + if want_shuffle: + l = list(self) + random.shuffle(l) + else: + l = self + for rd in l: + name.to_wire(file, compress, origin) + stuff = struct.pack("!HHIH", self.rdtype, rdclass, + self.ttl, 0) + file.write(stuff) + start = file.tell() + rd.to_wire(file, compress, origin) + end = file.tell() + assert end - start < 65536 + file.seek(start - 2) + stuff = struct.pack("!H", end - start) + file.write(stuff) + file.seek(0, 2) + return len(self) + + def match(self, rdclass, rdtype, covers): + """Returns ``True`` if this rdataset matches the specified class, + type, and covers. + """ + if self.rdclass == rdclass and \ + self.rdtype == rdtype and \ + self.covers == covers: + return True + return False + + +def from_text_list(rdclass, rdtype, ttl, text_rdatas): + """Create an rdataset with the specified class, type, and TTL, and with + the specified list of rdatas in text format. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + if isinstance(rdclass, string_types): + rdclass = dns.rdataclass.from_text(rdclass) + if isinstance(rdtype, string_types): + rdtype = dns.rdatatype.from_text(rdtype) + r = Rdataset(rdclass, rdtype) + r.update_ttl(ttl) + for t in text_rdatas: + rd = dns.rdata.from_text(r.rdclass, r.rdtype, t) + r.add(rd) + return r + + +def from_text(rdclass, rdtype, ttl, *text_rdatas): + """Create an rdataset with the specified class, type, and TTL, and with + the specified rdatas in text format. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + return from_text_list(rdclass, rdtype, ttl, text_rdatas) + + +def from_rdata_list(ttl, rdatas): + """Create an rdataset with the specified TTL, and with + the specified list of rdata objects. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + if len(rdatas) == 0: + raise ValueError("rdata list must not be empty") + r = None + for rd in rdatas: + if r is None: + r = Rdataset(rd.rdclass, rd.rdtype) + r.update_ttl(ttl) + r.add(rd) + return r + + +def from_rdata(ttl, *rdatas): + """Create an rdataset with the specified TTL, and with + the specified rdata objects. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + return from_rdata_list(ttl, rdatas) diff --git a/openpype/vendor/python/python_2/dns/rdatatype.py b/openpype/vendor/python/python_2/dns/rdatatype.py new file mode 100644 index 0000000000..b247bc9c42 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdatatype.py @@ -0,0 +1,287 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Rdata Types.""" + +import re + +import dns.exception + +NONE = 0 +A = 1 +NS = 2 +MD = 3 +MF = 4 +CNAME = 5 +SOA = 6 +MB = 7 +MG = 8 +MR = 9 +NULL = 10 +WKS = 11 +PTR = 12 +HINFO = 13 +MINFO = 14 +MX = 15 +TXT = 16 +RP = 17 +AFSDB = 18 +X25 = 19 +ISDN = 20 +RT = 21 +NSAP = 22 +NSAP_PTR = 23 +SIG = 24 +KEY = 25 +PX = 26 +GPOS = 27 +AAAA = 28 +LOC = 29 +NXT = 30 +SRV = 33 +NAPTR = 35 +KX = 36 +CERT = 37 +A6 = 38 +DNAME = 39 +OPT = 41 +APL = 42 +DS = 43 +SSHFP = 44 +IPSECKEY = 45 +RRSIG = 46 +NSEC = 47 +DNSKEY = 48 +DHCID = 49 +NSEC3 = 50 +NSEC3PARAM = 51 +TLSA = 52 +HIP = 55 +CDS = 59 +CDNSKEY = 60 +OPENPGPKEY = 61 +CSYNC = 62 +SPF = 99 +UNSPEC = 103 +EUI48 = 108 +EUI64 = 109 +TKEY = 249 +TSIG = 250 +IXFR = 251 +AXFR = 252 +MAILB = 253 +MAILA = 254 +ANY = 255 +URI = 256 +CAA = 257 +AVC = 258 +TA = 32768 +DLV = 32769 + +_by_text = { + 'NONE': NONE, + 'A': A, + 'NS': NS, + 'MD': MD, + 'MF': MF, + 'CNAME': CNAME, + 'SOA': SOA, + 'MB': MB, + 'MG': MG, + 'MR': MR, + 'NULL': NULL, + 'WKS': WKS, + 'PTR': PTR, + 'HINFO': HINFO, + 'MINFO': MINFO, + 'MX': MX, + 'TXT': TXT, + 'RP': RP, + 'AFSDB': AFSDB, + 'X25': X25, + 'ISDN': ISDN, + 'RT': RT, + 'NSAP': NSAP, + 'NSAP-PTR': NSAP_PTR, + 'SIG': SIG, + 'KEY': KEY, + 'PX': PX, + 'GPOS': GPOS, + 'AAAA': AAAA, + 'LOC': LOC, + 'NXT': NXT, + 'SRV': SRV, + 'NAPTR': NAPTR, + 'KX': KX, + 'CERT': CERT, + 'A6': A6, + 'DNAME': DNAME, + 'OPT': OPT, + 'APL': APL, + 'DS': DS, + 'SSHFP': SSHFP, + 'IPSECKEY': IPSECKEY, + 'RRSIG': RRSIG, + 'NSEC': NSEC, + 'DNSKEY': DNSKEY, + 'DHCID': DHCID, + 'NSEC3': NSEC3, + 'NSEC3PARAM': NSEC3PARAM, + 'TLSA': TLSA, + 'HIP': HIP, + 'CDS': CDS, + 'CDNSKEY': CDNSKEY, + 'OPENPGPKEY': OPENPGPKEY, + 'CSYNC': CSYNC, + 'SPF': SPF, + 'UNSPEC': UNSPEC, + 'EUI48': EUI48, + 'EUI64': EUI64, + 'TKEY': TKEY, + 'TSIG': TSIG, + 'IXFR': IXFR, + 'AXFR': AXFR, + 'MAILB': MAILB, + 'MAILA': MAILA, + 'ANY': ANY, + 'URI': URI, + 'CAA': CAA, + 'AVC': AVC, + 'TA': TA, + 'DLV': DLV, +} + +# We construct the inverse mapping programmatically to ensure that we +# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that +# would cause the mapping not to be true inverse. + +_by_value = {y: x for x, y in _by_text.items()} + +_metatypes = { + OPT: True +} + +_singletons = { + SOA: True, + NXT: True, + DNAME: True, + NSEC: True, + CNAME: True, +} + +_unknown_type_pattern = re.compile('TYPE([0-9]+)$', re.I) + + +class UnknownRdatatype(dns.exception.DNSException): + """DNS resource record type is unknown.""" + + +def from_text(text): + """Convert text into a DNS rdata type value. + + The input text can be a defined DNS RR type mnemonic or + instance of the DNS generic type syntax. + + For example, "NS" and "TYPE2" will both result in a value of 2. + + Raises ``dns.rdatatype.UnknownRdatatype`` if the type is unknown. + + Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. + + Returns an ``int``. 
+ """ + + value = _by_text.get(text.upper()) + if value is None: + match = _unknown_type_pattern.match(text) + if match is None: + raise UnknownRdatatype + value = int(match.group(1)) + if value < 0 or value > 65535: + raise ValueError("type must be between >= 0 and <= 65535") + return value + + +def to_text(value): + """Convert a DNS rdata type value to text. + + If the value has a known mnemonic, it will be used, otherwise the + DNS generic type syntax will be used. + + Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. + + Returns a ``str``. + """ + + if value < 0 or value > 65535: + raise ValueError("type must be between >= 0 and <= 65535") + text = _by_value.get(value) + if text is None: + text = 'TYPE' + repr(value) + return text + + +def is_metatype(rdtype): + """True if the specified type is a metatype. + + *rdtype* is an ``int``. + + The currently defined metatypes are TKEY, TSIG, IXFR, AXFR, MAILA, + MAILB, ANY, and OPT. + + Returns a ``bool``. + """ + + if rdtype >= TKEY and rdtype <= ANY or rdtype in _metatypes: + return True + return False + + +def is_singleton(rdtype): + """Is the specified type a singleton type? + + Singleton types can only have a single rdata in an rdataset, or a single + RR in an RRset. + + The currently defined singleton types are CNAME, DNAME, NSEC, NXT, and + SOA. + + *rdtype* is an ``int``. + + Returns a ``bool``. + """ + + if rdtype in _singletons: + return True + return False + + +def register_type(rdtype, rdtype_text, is_singleton=False): # pylint: disable=redefined-outer-name + """Dynamically register an rdatatype. + + *rdtype*, an ``int``, the rdatatype to register. + + *rdtype_text*, a ``text``, the textual form of the rdatatype. + + *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. + RRsets of the type can have only one member.) + """ + + _by_text[rdtype_text] = rdtype + _by_value[rdtype] = rdtype_text + if is_singleton: + _singletons[rdtype] = True diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/AFSDB.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/AFSDB.py new file mode 100644 index 0000000000..c6a700cf56 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/AFSDB.py @@ -0,0 +1,55 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.mxbase + + +class AFSDB(dns.rdtypes.mxbase.UncompressedDowncasingMX): + + """AFSDB record + + @ivar subtype: the subtype value + @type subtype: int + @ivar hostname: the hostname name + @type hostname: dns.name.Name object""" + + # Use the property mechanism to make "subtype" an alias for the + # "preference" attribute, and "hostname" an alias for the "exchange" + # attribute. 
+ # + # This lets us inherit the UncompressedMX implementation but lets + # the caller use appropriate attribute names for the rdata type. + # + # We probably lose some performance vs. a cut-and-paste + # implementation, but this way we don't copy code, and that's + # good. + + def get_subtype(self): + return self.preference + + def set_subtype(self, subtype): + self.preference = subtype + + subtype = property(get_subtype, set_subtype) + + def get_hostname(self): + return self.exchange + + def set_hostname(self, hostname): + self.exchange = hostname + + hostname = property(get_hostname, set_hostname) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/AVC.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/AVC.py new file mode 100644 index 0000000000..7f340b39d2 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/AVC.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2016 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.txtbase + + +class AVC(dns.rdtypes.txtbase.TXTBase): + + """AVC record + + @see: U{http://www.iana.org/assignments/dns-parameters/AVC/avc-completed-template}""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/CAA.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/CAA.py new file mode 100644 index 0000000000..0acf201ab1 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/CAA.py @@ -0,0 +1,75 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
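+# An illustrative round-trip for the CAA type defined below (comment only;
+# the issuer domain is a placeholder, and the text form is
+# ``flags tag "value"`` per RFC 6844):
+#
+#     >>> import dns.rdata, dns.rdataclass, dns.rdatatype
+#     >>> caa = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.CAA,
+#     ...                           '0 issue "ca.example.net"')
+#     >>> caa.flags
+#     0
+#     >>> caa.to_text()
+#     '0 issue "ca.example.net"'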
+ +import struct + +import dns.exception +import dns.rdata +import dns.tokenizer + + +class CAA(dns.rdata.Rdata): + + """CAA (Certification Authority Authorization) record + + @ivar flags: the flags + @type flags: int + @ivar tag: the tag + @type tag: string + @ivar value: the value + @type value: string + @see: RFC 6844""" + + __slots__ = ['flags', 'tag', 'value'] + + def __init__(self, rdclass, rdtype, flags, tag, value): + super(CAA, self).__init__(rdclass, rdtype) + self.flags = flags + self.tag = tag + self.value = value + + def to_text(self, origin=None, relativize=True, **kw): + return '%u %s "%s"' % (self.flags, + dns.rdata._escapify(self.tag), + dns.rdata._escapify(self.value)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + flags = tok.get_uint8() + tag = tok.get_string().encode() + if len(tag) > 255: + raise dns.exception.SyntaxError("tag too long") + if not tag.isalnum(): + raise dns.exception.SyntaxError("tag is not alphanumeric") + value = tok.get_string().encode() + return cls(rdclass, rdtype, flags, tag, value) + + def to_wire(self, file, compress=None, origin=None): + file.write(struct.pack('!B', self.flags)) + l = len(self.tag) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.tag) + file.write(self.value) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + (flags, l) = struct.unpack('!BB', wire[current: current + 2]) + current += 2 + tag = wire[current: current + l] + value = wire[current + l:current + rdlen - 2] + return cls(rdclass, rdtype, flags, tag, value) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/CDNSKEY.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/CDNSKEY.py new file mode 100644 index 0000000000..653ae1be16 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/CDNSKEY.py @@ -0,0 +1,27 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.dnskeybase +from dns.rdtypes.dnskeybase import flags_to_text_set, flags_from_text_set + + +__all__ = ['flags_to_text_set', 'flags_from_text_set'] + + +class CDNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): + + """CDNSKEY record""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/CDS.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/CDS.py new file mode 100644 index 0000000000..a63041dd79 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/CDS.py @@ -0,0 +1,23 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.dsbase + + +class CDS(dns.rdtypes.dsbase.DSBase): + + """CDS record""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/CERT.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/CERT.py new file mode 100644 index 0000000000..eea27b52c3 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/CERT.py @@ -0,0 +1,123 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
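+# A sketch of the CERT text form, ``certificate-type key-tag algorithm
+# base64-certificate`` (comment only; the base64 payload is a dummy value,
+# not a real certificate, and 'RSASHA256' assumes the usual dns.dnssec
+# algorithm mnemonics):
+#
+#     >>> import dns.rdata, dns.rdataclass, dns.rdatatype
+#     >>> cert = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.CERT,
+#     ...                            'PKIX 1 RSASHA256 MTIzNA==')
+#     >>> cert.certificate_type, cert.key_tag
+#     (1, 1)
+#     >>> cert.to_text()
+#     'PKIX 1 RSASHA256 MTIzNA=='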
+ +import struct +import base64 + +import dns.exception +import dns.dnssec +import dns.rdata +import dns.tokenizer + +_ctype_by_value = { + 1: 'PKIX', + 2: 'SPKI', + 3: 'PGP', + 253: 'URI', + 254: 'OID', +} + +_ctype_by_name = { + 'PKIX': 1, + 'SPKI': 2, + 'PGP': 3, + 'URI': 253, + 'OID': 254, +} + + +def _ctype_from_text(what): + v = _ctype_by_name.get(what) + if v is not None: + return v + return int(what) + + +def _ctype_to_text(what): + v = _ctype_by_value.get(what) + if v is not None: + return v + return str(what) + + +class CERT(dns.rdata.Rdata): + + """CERT record + + @ivar certificate_type: certificate type + @type certificate_type: int + @ivar key_tag: key tag + @type key_tag: int + @ivar algorithm: algorithm + @type algorithm: int + @ivar certificate: the certificate or CRL + @type certificate: string + @see: RFC 2538""" + + __slots__ = ['certificate_type', 'key_tag', 'algorithm', 'certificate'] + + def __init__(self, rdclass, rdtype, certificate_type, key_tag, algorithm, + certificate): + super(CERT, self).__init__(rdclass, rdtype) + self.certificate_type = certificate_type + self.key_tag = key_tag + self.algorithm = algorithm + self.certificate = certificate + + def to_text(self, origin=None, relativize=True, **kw): + certificate_type = _ctype_to_text(self.certificate_type) + return "%s %d %s %s" % (certificate_type, self.key_tag, + dns.dnssec.algorithm_to_text(self.algorithm), + dns.rdata._base64ify(self.certificate)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + certificate_type = _ctype_from_text(tok.get_string()) + key_tag = tok.get_uint16() + algorithm = dns.dnssec.algorithm_from_text(tok.get_string()) + if algorithm < 0 or algorithm > 255: + raise dns.exception.SyntaxError("bad algorithm type") + chunks = [] + while 1: + t = tok.get().unescape() + if t.is_eol_or_eof(): + break + if not t.is_identifier(): + raise dns.exception.SyntaxError + chunks.append(t.value.encode()) + b64 = b''.join(chunks) + certificate = base64.b64decode(b64) + return cls(rdclass, rdtype, certificate_type, key_tag, + algorithm, certificate) + + def to_wire(self, file, compress=None, origin=None): + prefix = struct.pack("!HHB", self.certificate_type, self.key_tag, + self.algorithm) + file.write(prefix) + file.write(self.certificate) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + prefix = wire[current: current + 5].unwrap() + current += 5 + rdlen -= 5 + if rdlen < 0: + raise dns.exception.FormError + (certificate_type, key_tag, algorithm) = struct.unpack("!HHB", prefix) + certificate = wire[current: current + rdlen].unwrap() + return cls(rdclass, rdtype, certificate_type, key_tag, algorithm, + certificate) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/CNAME.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/CNAME.py new file mode 100644 index 0000000000..11d42aa7fd --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/CNAME.py @@ -0,0 +1,27 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.nsbase + + +class CNAME(dns.rdtypes.nsbase.NSBase): + + """CNAME record + + Note: although CNAME is officially a singleton type, dnspython allows + non-singleton CNAME rdatasets because such sets have been commonly + used by BIND and other nameservers for load balancing.""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/CSYNC.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/CSYNC.py new file mode 100644 index 0000000000..06292fb28c --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/CSYNC.py @@ -0,0 +1,126 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011, 2016 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
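+# A small example of the CSYNC text form, ``SOA-serial flags type-list``
+# (comment only; values illustrative). The type list is packed into
+# NSEC-style (window, bitmap) pairs on the wire, as from_text() below shows:
+#
+#     >>> import dns.rdata, dns.rdataclass, dns.rdatatype
+#     >>> csync = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.CSYNC,
+#     ...                             '66 3 A NS AAAA')
+#     >>> csync.serial, csync.flags
+#     (66, 3)
+#     >>> csync.to_text()
+#     '66 3 A NS AAAA'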
+ +import struct + +import dns.exception +import dns.rdata +import dns.rdatatype +import dns.name +from dns._compat import xrange + +class CSYNC(dns.rdata.Rdata): + + """CSYNC record + + @ivar serial: the SOA serial number + @type serial: int + @ivar flags: the CSYNC flags + @type flags: int + @ivar windows: the windowed bitmap list + @type windows: list of (window number, string) tuples""" + + __slots__ = ['serial', 'flags', 'windows'] + + def __init__(self, rdclass, rdtype, serial, flags, windows): + super(CSYNC, self).__init__(rdclass, rdtype) + self.serial = serial + self.flags = flags + self.windows = windows + + def to_text(self, origin=None, relativize=True, **kw): + text = '' + for (window, bitmap) in self.windows: + bits = [] + for i in xrange(0, len(bitmap)): + byte = bitmap[i] + for j in xrange(0, 8): + if byte & (0x80 >> j): + bits.append(dns.rdatatype.to_text(window * 256 + + i * 8 + j)) + text += (' ' + ' '.join(bits)) + return '%d %d%s' % (self.serial, self.flags, text) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + serial = tok.get_uint32() + flags = tok.get_uint16() + rdtypes = [] + while 1: + token = tok.get().unescape() + if token.is_eol_or_eof(): + break + nrdtype = dns.rdatatype.from_text(token.value) + if nrdtype == 0: + raise dns.exception.SyntaxError("CSYNC with bit 0") + if nrdtype > 65535: + raise dns.exception.SyntaxError("CSYNC with bit > 65535") + rdtypes.append(nrdtype) + rdtypes.sort() + window = 0 + octets = 0 + prior_rdtype = 0 + bitmap = bytearray(b'\0' * 32) + windows = [] + for nrdtype in rdtypes: + if nrdtype == prior_rdtype: + continue + prior_rdtype = nrdtype + new_window = nrdtype // 256 + if new_window != window: + windows.append((window, bitmap[0:octets])) + bitmap = bytearray(b'\0' * 32) + window = new_window + offset = nrdtype % 256 + byte = offset // 8 + bit = offset % 8 + octets = byte + 1 + bitmap[byte] = bitmap[byte] | (0x80 >> bit) + + windows.append((window, bitmap[0:octets])) + return cls(rdclass, rdtype, serial, flags, windows) + + def to_wire(self, file, compress=None, origin=None): + file.write(struct.pack('!IH', self.serial, self.flags)) + for (window, bitmap) in self.windows: + file.write(struct.pack('!BB', window, len(bitmap))) + file.write(bitmap) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + if rdlen < 6: + raise dns.exception.FormError("CSYNC too short") + (serial, flags) = struct.unpack("!IH", wire[current: current + 6]) + current += 6 + rdlen -= 6 + windows = [] + while rdlen > 0: + if rdlen < 3: + raise dns.exception.FormError("CSYNC too short") + window = wire[current] + octets = wire[current + 1] + if octets == 0 or octets > 32: + raise dns.exception.FormError("bad CSYNC octets") + current += 2 + rdlen -= 2 + if rdlen < octets: + raise dns.exception.FormError("bad CSYNC bitmap length") + bitmap = bytearray(wire[current: current + octets].unwrap()) + current += octets + rdlen -= octets + windows.append((window, bitmap)) + return cls(rdclass, rdtype, serial, flags, windows) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/DLV.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/DLV.py new file mode 100644 index 0000000000..1635212583 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/DLV.py @@ -0,0 +1,23 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2009-2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.dsbase + + +class DLV(dns.rdtypes.dsbase.DSBase): + + """DLV record""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/DNAME.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/DNAME.py new file mode 100644 index 0000000000..2499283cfa --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/DNAME.py @@ -0,0 +1,26 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.nsbase + + +class DNAME(dns.rdtypes.nsbase.UncompressedNS): + + """DNAME record""" + + def to_digestable(self, origin=None): + return self.target.to_digestable(origin) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/DNSKEY.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/DNSKEY.py new file mode 100644 index 0000000000..e36f7bc5b1 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/DNSKEY.py @@ -0,0 +1,27 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
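+# A hedged sketch of the flag helpers re-exported below (comment only;
+# assumes the usual dnskeybase constants ZONE=0x0100 and SEP=0x0001, which
+# are defined outside this file):
+#
+#     >>> from dns.rdtypes.ANY.DNSKEY import flags_to_text_set, flags_from_text_set
+#     >>> sorted(flags_to_text_set(257))
+#     ['SEP', 'ZONE']
+#     >>> flags_from_text_set(['SEP', 'ZONE'])
+#     257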
+ +import dns.rdtypes.dnskeybase +from dns.rdtypes.dnskeybase import flags_to_text_set, flags_from_text_set + + +__all__ = ['flags_to_text_set', 'flags_from_text_set'] + + +class DNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): + + """DNSKEY record""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/DS.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/DS.py new file mode 100644 index 0000000000..7d457b2281 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/DS.py @@ -0,0 +1,23 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.dsbase + + +class DS(dns.rdtypes.dsbase.DSBase): + + """DS record""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/EUI48.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/EUI48.py new file mode 100644 index 0000000000..aa260e205d --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/EUI48.py @@ -0,0 +1,29 @@ +# Copyright (C) 2015 Red Hat, Inc. +# Author: Petr Spacek +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.euibase + + +class EUI48(dns.rdtypes.euibase.EUIBase): + + """EUI48 record + + @ivar fingerprint: 48-bit Extended Unique Identifier (EUI-48) + @type fingerprint: string + @see: rfc7043.txt""" + + byte_len = 6 # 0123456789ab (in hex) + text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/EUI64.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/EUI64.py new file mode 100644 index 0000000000..5eba350d8f --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/EUI64.py @@ -0,0 +1,29 @@ +# Copyright (C) 2015 Red Hat, Inc. +# Author: Petr Spacek +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.euibase + + +class EUI64(dns.rdtypes.euibase.EUIBase): + + """EUI64 record + + @ivar fingerprint: 64-bit Extended Unique Identifier (EUI-64) + @type fingerprint: string + @see: rfc7043.txt""" + + byte_len = 8 # 0123456789abcdef (in hex) + text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab-cd-ef diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/GPOS.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/GPOS.py new file mode 100644 index 0000000000..422822f03b --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/GPOS.py @@ -0,0 +1,162 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
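+# An illustrative round-trip for the GPOS type defined below (comment only;
+# coordinates taken from the RFC 1712 example). GPOS keeps latitude,
+# longitude and altitude as the literal decimal strings from the master
+# file; the float_* properties convert on demand:
+#
+#     >>> import dns.rdata, dns.rdataclass, dns.rdatatype
+#     >>> gpos = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.GPOS,
+#     ...                            '-32.6882 116.8652 10.0')
+#     >>> gpos.float_latitude
+#     -32.6882
+#     >>> gpos.to_text()
+#     '-32.6882 116.8652 10.0'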
+ +import struct + +import dns.exception +import dns.rdata +import dns.tokenizer +from dns._compat import long, text_type + + +def _validate_float_string(what): + if what[0] == b'-'[0] or what[0] == b'+'[0]: + what = what[1:] + if what.isdigit(): + return + (left, right) = what.split(b'.') + if left == b'' and right == b'': + raise dns.exception.FormError + if not left == b'' and not left.decode().isdigit(): + raise dns.exception.FormError + if not right == b'' and not right.decode().isdigit(): + raise dns.exception.FormError + + +def _sanitize(value): + if isinstance(value, text_type): + return value.encode() + return value + + +class GPOS(dns.rdata.Rdata): + + """GPOS record + + @ivar latitude: latitude + @type latitude: string + @ivar longitude: longitude + @type longitude: string + @ivar altitude: altitude + @type altitude: string + @see: RFC 1712""" + + __slots__ = ['latitude', 'longitude', 'altitude'] + + def __init__(self, rdclass, rdtype, latitude, longitude, altitude): + super(GPOS, self).__init__(rdclass, rdtype) + if isinstance(latitude, float) or \ + isinstance(latitude, int) or \ + isinstance(latitude, long): + latitude = str(latitude) + if isinstance(longitude, float) or \ + isinstance(longitude, int) or \ + isinstance(longitude, long): + longitude = str(longitude) + if isinstance(altitude, float) or \ + isinstance(altitude, int) or \ + isinstance(altitude, long): + altitude = str(altitude) + latitude = _sanitize(latitude) + longitude = _sanitize(longitude) + altitude = _sanitize(altitude) + _validate_float_string(latitude) + _validate_float_string(longitude) + _validate_float_string(altitude) + self.latitude = latitude + self.longitude = longitude + self.altitude = altitude + + def to_text(self, origin=None, relativize=True, **kw): + return '{} {} {}'.format(self.latitude.decode(), + self.longitude.decode(), + self.altitude.decode()) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + latitude = tok.get_string() + longitude = tok.get_string() + altitude = tok.get_string() + tok.get_eol() + return cls(rdclass, rdtype, latitude, longitude, altitude) + + def to_wire(self, file, compress=None, origin=None): + l = len(self.latitude) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.latitude) + l = len(self.longitude) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.longitude) + l = len(self.altitude) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.altitude) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + l = wire[current] + current += 1 + rdlen -= 1 + if l > rdlen: + raise dns.exception.FormError + latitude = wire[current: current + l].unwrap() + current += l + rdlen -= l + l = wire[current] + current += 1 + rdlen -= 1 + if l > rdlen: + raise dns.exception.FormError + longitude = wire[current: current + l].unwrap() + current += l + rdlen -= l + l = wire[current] + current += 1 + rdlen -= 1 + if l != rdlen: + raise dns.exception.FormError + altitude = wire[current: current + l].unwrap() + return cls(rdclass, rdtype, latitude, longitude, altitude) + + def _get_float_latitude(self): + return float(self.latitude) + + def _set_float_latitude(self, value): + self.latitude = str(value) + + float_latitude = property(_get_float_latitude, _set_float_latitude, + doc="latitude as a floating point value") + + def _get_float_longitude(self): + return float(self.longitude) + + def _set_float_longitude(self, value): + self.longitude = str(value) + + 
float_longitude = property(_get_float_longitude, _set_float_longitude, + doc="longitude as a floating point value") + + def _get_float_altitude(self): + return float(self.altitude) + + def _set_float_altitude(self, value): + self.altitude = str(value) + + float_altitude = property(_get_float_altitude, _set_float_altitude, + doc="altitude as a floating point value") diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/HINFO.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/HINFO.py new file mode 100644 index 0000000000..e4e0b34a49 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/HINFO.py @@ -0,0 +1,86 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.rdata +import dns.tokenizer +from dns._compat import text_type + + +class HINFO(dns.rdata.Rdata): + + """HINFO record + + @ivar cpu: the CPU type + @type cpu: string + @ivar os: the OS type + @type os: string + @see: RFC 1035""" + + __slots__ = ['cpu', 'os'] + + def __init__(self, rdclass, rdtype, cpu, os): + super(HINFO, self).__init__(rdclass, rdtype) + if isinstance(cpu, text_type): + self.cpu = cpu.encode() + else: + self.cpu = cpu + if isinstance(os, text_type): + self.os = os.encode() + else: + self.os = os + + def to_text(self, origin=None, relativize=True, **kw): + return '"{}" "{}"'.format(dns.rdata._escapify(self.cpu), + dns.rdata._escapify(self.os)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + cpu = tok.get_string() + os = tok.get_string() + tok.get_eol() + return cls(rdclass, rdtype, cpu, os) + + def to_wire(self, file, compress=None, origin=None): + l = len(self.cpu) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.cpu) + l = len(self.os) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.os) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + l = wire[current] + current += 1 + rdlen -= 1 + if l > rdlen: + raise dns.exception.FormError + cpu = wire[current:current + l].unwrap() + current += l + rdlen -= l + l = wire[current] + current += 1 + rdlen -= 1 + if l != rdlen: + raise dns.exception.FormError + os = wire[current: current + l].unwrap() + return cls(rdclass, rdtype, cpu, os) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/HIP.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/HIP.py new file mode 100644 index 0000000000..7c876b2d2f --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/HIP.py @@ -0,0 +1,115 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2010, 2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct +import base64 +import binascii + +import dns.exception +import dns.rdata +import dns.rdatatype + + +class HIP(dns.rdata.Rdata): + + """HIP record + + @ivar hit: the host identity tag + @type hit: string + @ivar algorithm: the public key cryptographic algorithm + @type algorithm: int + @ivar key: the public key + @type key: string + @ivar servers: the rendezvous servers + @type servers: list of dns.name.Name objects + @see: RFC 5205""" + + __slots__ = ['hit', 'algorithm', 'key', 'servers'] + + def __init__(self, rdclass, rdtype, hit, algorithm, key, servers): + super(HIP, self).__init__(rdclass, rdtype) + self.hit = hit + self.algorithm = algorithm + self.key = key + self.servers = servers + + def to_text(self, origin=None, relativize=True, **kw): + hit = binascii.hexlify(self.hit).decode() + key = base64.b64encode(self.key).replace(b'\n', b'').decode() + text = u'' + servers = [] + for server in self.servers: + servers.append(server.choose_relativity(origin, relativize)) + if len(servers) > 0: + text += (u' ' + u' '.join((x.to_unicode() for x in servers))) + return u'%u %s %s%s' % (self.algorithm, hit, key, text) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + algorithm = tok.get_uint8() + hit = binascii.unhexlify(tok.get_string().encode()) + if len(hit) > 255: + raise dns.exception.SyntaxError("HIT too long") + key = base64.b64decode(tok.get_string().encode()) + servers = [] + while 1: + token = tok.get() + if token.is_eol_or_eof(): + break + server = dns.name.from_text(token.value, origin) + server.choose_relativity(origin, relativize) + servers.append(server) + return cls(rdclass, rdtype, hit, algorithm, key, servers) + + def to_wire(self, file, compress=None, origin=None): + lh = len(self.hit) + lk = len(self.key) + file.write(struct.pack("!BBH", lh, self.algorithm, lk)) + file.write(self.hit) + file.write(self.key) + for server in self.servers: + server.to_wire(file, None, origin) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + (lh, algorithm, lk) = struct.unpack('!BBH', + wire[current: current + 4]) + current += 4 + rdlen -= 4 + hit = wire[current: current + lh].unwrap() + current += lh + rdlen -= lh + key = wire[current: current + lk].unwrap() + current += lk + rdlen -= lk + servers = [] + while rdlen > 0: + (server, cused) = dns.name.from_wire(wire[: current + rdlen], + current) + current += cused + rdlen -= cused + if origin is not None: + server = server.relativize(origin) + servers.append(server) + return cls(rdclass, rdtype, hit, algorithm, key, servers) + + def choose_relativity(self, origin=None, relativize=True): + servers = [] + for server in self.servers: + server = server.choose_relativity(origin, relativize) + 
servers.append(server) + self.servers = servers diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/ISDN.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/ISDN.py new file mode 100644 index 0000000000..f5f5f8b9ea --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/ISDN.py @@ -0,0 +1,99 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.rdata +import dns.tokenizer +from dns._compat import text_type + + +class ISDN(dns.rdata.Rdata): + + """ISDN record + + @ivar address: the ISDN address + @type address: string + @ivar subaddress: the ISDN subaddress (or '' if not present) + @type subaddress: string + @see: RFC 1183""" + + __slots__ = ['address', 'subaddress'] + + def __init__(self, rdclass, rdtype, address, subaddress): + super(ISDN, self).__init__(rdclass, rdtype) + if isinstance(address, text_type): + self.address = address.encode() + else: + self.address = address + if isinstance(address, text_type): + self.subaddress = subaddress.encode() + else: + self.subaddress = subaddress + + def to_text(self, origin=None, relativize=True, **kw): + if self.subaddress: + return '"{}" "{}"'.format(dns.rdata._escapify(self.address), + dns.rdata._escapify(self.subaddress)) + else: + return '"%s"' % dns.rdata._escapify(self.address) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + address = tok.get_string() + t = tok.get() + if not t.is_eol_or_eof(): + tok.unget(t) + subaddress = tok.get_string() + else: + tok.unget(t) + subaddress = '' + tok.get_eol() + return cls(rdclass, rdtype, address, subaddress) + + def to_wire(self, file, compress=None, origin=None): + l = len(self.address) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.address) + l = len(self.subaddress) + if l > 0: + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.subaddress) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + l = wire[current] + current += 1 + rdlen -= 1 + if l > rdlen: + raise dns.exception.FormError + address = wire[current: current + l].unwrap() + current += l + rdlen -= l + if rdlen > 0: + l = wire[current] + current += 1 + rdlen -= 1 + if l != rdlen: + raise dns.exception.FormError + subaddress = wire[current: current + l].unwrap() + else: + subaddress = '' + return cls(rdclass, rdtype, address, subaddress) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/LOC.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/LOC.py new file mode 100644 index 0000000000..da9bb03a95 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/LOC.py @@ -0,0 +1,327 @@ +# Copyright (C) Dnspython 
Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +from __future__ import division + +import struct + +import dns.exception +import dns.rdata +from dns._compat import long, xrange, round_py2_compat + + +_pows = tuple(long(10**i) for i in range(0, 11)) + +# default values are in centimeters +_default_size = 100.0 +_default_hprec = 1000000.0 +_default_vprec = 1000.0 + + +def _exponent_of(what, desc): + if what == 0: + return 0 + exp = None + for i in xrange(len(_pows)): + if what // _pows[i] == long(0): + exp = i - 1 + break + if exp is None or exp < 0: + raise dns.exception.SyntaxError("%s value out of bounds" % desc) + return exp + + +def _float_to_tuple(what): + if what < 0: + sign = -1 + what *= -1 + else: + sign = 1 + what = round_py2_compat(what * 3600000) + degrees = int(what // 3600000) + what -= degrees * 3600000 + minutes = int(what // 60000) + what -= minutes * 60000 + seconds = int(what // 1000) + what -= int(seconds * 1000) + what = int(what) + return (degrees, minutes, seconds, what, sign) + + +def _tuple_to_float(what): + value = float(what[0]) + value += float(what[1]) / 60.0 + value += float(what[2]) / 3600.0 + value += float(what[3]) / 3600000.0 + return float(what[4]) * value + + +def _encode_size(what, desc): + what = long(what) + exponent = _exponent_of(what, desc) & 0xF + base = what // pow(10, exponent) & 0xF + return base * 16 + exponent + + +def _decode_size(what, desc): + exponent = what & 0x0F + if exponent > 9: + raise dns.exception.SyntaxError("bad %s exponent" % desc) + base = (what & 0xF0) >> 4 + if base > 9: + raise dns.exception.SyntaxError("bad %s base" % desc) + return long(base) * pow(10, exponent) + + +class LOC(dns.rdata.Rdata): + + """LOC record + + @ivar latitude: latitude + @type latitude: (int, int, int, int, sign) tuple specifying the degrees, minutes, + seconds, milliseconds, and sign of the coordinate. + @ivar longitude: longitude + @type longitude: (int, int, int, int, sign) tuple specifying the degrees, + minutes, seconds, milliseconds, and sign of the coordinate. + @ivar altitude: altitude + @type altitude: float + @ivar size: size of the sphere + @type size: float + @ivar horizontal_precision: horizontal precision + @type horizontal_precision: float + @ivar vertical_precision: vertical precision + @type vertical_precision: float + @see: RFC 1876""" + + __slots__ = ['latitude', 'longitude', 'altitude', 'size', + 'horizontal_precision', 'vertical_precision'] + + def __init__(self, rdclass, rdtype, latitude, longitude, altitude, + size=_default_size, hprec=_default_hprec, + vprec=_default_vprec): + """Initialize a LOC record instance. 
+ + The parameters I{latitude} and I{longitude} may be either a 4-tuple + of integers specifying (degrees, minutes, seconds, milliseconds), + or they may be floating point values specifying the number of + degrees. The other parameters are floats. Size, horizontal precision, + and vertical precision are specified in centimeters.""" + + super(LOC, self).__init__(rdclass, rdtype) + if isinstance(latitude, int) or isinstance(latitude, long): + latitude = float(latitude) + if isinstance(latitude, float): + latitude = _float_to_tuple(latitude) + self.latitude = latitude + if isinstance(longitude, int) or isinstance(longitude, long): + longitude = float(longitude) + if isinstance(longitude, float): + longitude = _float_to_tuple(longitude) + self.longitude = longitude + self.altitude = float(altitude) + self.size = float(size) + self.horizontal_precision = float(hprec) + self.vertical_precision = float(vprec) + + def to_text(self, origin=None, relativize=True, **kw): + if self.latitude[4] > 0: + lat_hemisphere = 'N' + else: + lat_hemisphere = 'S' + if self.longitude[4] > 0: + long_hemisphere = 'E' + else: + long_hemisphere = 'W' + text = "%d %d %d.%03d %s %d %d %d.%03d %s %0.2fm" % ( + self.latitude[0], self.latitude[1], + self.latitude[2], self.latitude[3], lat_hemisphere, + self.longitude[0], self.longitude[1], self.longitude[2], + self.longitude[3], long_hemisphere, + self.altitude / 100.0 + ) + + # do not print default values + if self.size != _default_size or \ + self.horizontal_precision != _default_hprec or \ + self.vertical_precision != _default_vprec: + text += " {:0.2f}m {:0.2f}m {:0.2f}m".format( + self.size / 100.0, self.horizontal_precision / 100.0, + self.vertical_precision / 100.0 + ) + return text + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + latitude = [0, 0, 0, 0, 1] + longitude = [0, 0, 0, 0, 1] + size = _default_size + hprec = _default_hprec + vprec = _default_vprec + + latitude[0] = tok.get_int() + t = tok.get_string() + if t.isdigit(): + latitude[1] = int(t) + t = tok.get_string() + if '.' in t: + (seconds, milliseconds) = t.split('.') + if not seconds.isdigit(): + raise dns.exception.SyntaxError( + 'bad latitude seconds value') + latitude[2] = int(seconds) + if latitude[2] >= 60: + raise dns.exception.SyntaxError('latitude seconds >= 60') + l = len(milliseconds) + if l == 0 or l > 3 or not milliseconds.isdigit(): + raise dns.exception.SyntaxError( + 'bad latitude milliseconds value') + if l == 1: + m = 100 + elif l == 2: + m = 10 + else: + m = 1 + latitude[3] = m * int(milliseconds) + t = tok.get_string() + elif t.isdigit(): + latitude[2] = int(t) + t = tok.get_string() + if t == 'S': + latitude[4] = -1 + elif t != 'N': + raise dns.exception.SyntaxError('bad latitude hemisphere value') + + longitude[0] = tok.get_int() + t = tok.get_string() + if t.isdigit(): + longitude[1] = int(t) + t = tok.get_string() + if '.' 
in t: + (seconds, milliseconds) = t.split('.') + if not seconds.isdigit(): + raise dns.exception.SyntaxError( + 'bad longitude seconds value') + longitude[2] = int(seconds) + if longitude[2] >= 60: + raise dns.exception.SyntaxError('longitude seconds >= 60') + l = len(milliseconds) + if l == 0 or l > 3 or not milliseconds.isdigit(): + raise dns.exception.SyntaxError( + 'bad longitude milliseconds value') + if l == 1: + m = 100 + elif l == 2: + m = 10 + else: + m = 1 + longitude[3] = m * int(milliseconds) + t = tok.get_string() + elif t.isdigit(): + longitude[2] = int(t) + t = tok.get_string() + if t == 'W': + longitude[4] = -1 + elif t != 'E': + raise dns.exception.SyntaxError('bad longitude hemisphere value') + + t = tok.get_string() + if t[-1] == 'm': + t = t[0: -1] + altitude = float(t) * 100.0 # m -> cm + + token = tok.get().unescape() + if not token.is_eol_or_eof(): + value = token.value + if value[-1] == 'm': + value = value[0: -1] + size = float(value) * 100.0 # m -> cm + token = tok.get().unescape() + if not token.is_eol_or_eof(): + value = token.value + if value[-1] == 'm': + value = value[0: -1] + hprec = float(value) * 100.0 # m -> cm + token = tok.get().unescape() + if not token.is_eol_or_eof(): + value = token.value + if value[-1] == 'm': + value = value[0: -1] + vprec = float(value) * 100.0 # m -> cm + tok.get_eol() + + return cls(rdclass, rdtype, latitude, longitude, altitude, + size, hprec, vprec) + + def to_wire(self, file, compress=None, origin=None): + milliseconds = (self.latitude[0] * 3600000 + + self.latitude[1] * 60000 + + self.latitude[2] * 1000 + + self.latitude[3]) * self.latitude[4] + latitude = long(0x80000000) + milliseconds + milliseconds = (self.longitude[0] * 3600000 + + self.longitude[1] * 60000 + + self.longitude[2] * 1000 + + self.longitude[3]) * self.longitude[4] + longitude = long(0x80000000) + milliseconds + altitude = long(self.altitude) + long(10000000) + size = _encode_size(self.size, "size") + hprec = _encode_size(self.horizontal_precision, "horizontal precision") + vprec = _encode_size(self.vertical_precision, "vertical precision") + wire = struct.pack("!BBBBIII", 0, size, hprec, vprec, latitude, + longitude, altitude) + file.write(wire) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + (version, size, hprec, vprec, latitude, longitude, altitude) = \ + struct.unpack("!BBBBIII", wire[current: current + rdlen]) + if latitude > long(0x80000000): + latitude = float(latitude - long(0x80000000)) / 3600000 + else: + latitude = -1 * float(long(0x80000000) - latitude) / 3600000 + if latitude < -90.0 or latitude > 90.0: + raise dns.exception.FormError("bad latitude") + if longitude > long(0x80000000): + longitude = float(longitude - long(0x80000000)) / 3600000 + else: + longitude = -1 * float(long(0x80000000) - longitude) / 3600000 + if longitude < -180.0 or longitude > 180.0: + raise dns.exception.FormError("bad longitude") + altitude = float(altitude) - 10000000.0 + size = _decode_size(size, "size") + hprec = _decode_size(hprec, "horizontal precision") + vprec = _decode_size(vprec, "vertical precision") + return cls(rdclass, rdtype, latitude, longitude, altitude, + size, hprec, vprec) + + def _get_float_latitude(self): + return _tuple_to_float(self.latitude) + + def _set_float_latitude(self, value): + self.latitude = _float_to_tuple(value) + + float_latitude = property(_get_float_latitude, _set_float_latitude, + doc="latitude as a floating point value") + + def _get_float_longitude(self): + return 
_tuple_to_float(self.longitude) + + def _set_float_longitude(self, value): + self.longitude = _float_to_tuple(value) + + float_longitude = property(_get_float_longitude, _set_float_longitude, + doc="longitude as a floating point value") diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/MX.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/MX.py new file mode 100644 index 0000000000..0a06494f73 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/MX.py @@ -0,0 +1,23 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.mxbase + + +class MX(dns.rdtypes.mxbase.MXBase): + + """MX record""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/NS.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/NS.py new file mode 100644 index 0000000000..f9fcf637f7 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/NS.py @@ -0,0 +1,23 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.nsbase + + +class NS(dns.rdtypes.nsbase.NSBase): + + """NS record""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/NSEC.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/NSEC.py new file mode 100644 index 0000000000..4e3da7296b --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/NSEC.py @@ -0,0 +1,128 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.rdata +import dns.rdatatype +import dns.name +from dns._compat import xrange + + +class NSEC(dns.rdata.Rdata): + + """NSEC record + + @ivar next: the next name + @type next: dns.name.Name object + @ivar windows: the windowed bitmap list + @type windows: list of (window number, string) tuples""" + + __slots__ = ['next', 'windows'] + + def __init__(self, rdclass, rdtype, next, windows): + super(NSEC, self).__init__(rdclass, rdtype) + self.next = next + self.windows = windows + + def to_text(self, origin=None, relativize=True, **kw): + next = self.next.choose_relativity(origin, relativize) + text = '' + for (window, bitmap) in self.windows: + bits = [] + for i in xrange(0, len(bitmap)): + byte = bitmap[i] + for j in xrange(0, 8): + if byte & (0x80 >> j): + bits.append(dns.rdatatype.to_text(window * 256 + + i * 8 + j)) + text += (' ' + ' '.join(bits)) + return '{}{}'.format(next, text) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + next = tok.get_name() + next = next.choose_relativity(origin, relativize) + rdtypes = [] + while 1: + token = tok.get().unescape() + if token.is_eol_or_eof(): + break + nrdtype = dns.rdatatype.from_text(token.value) + if nrdtype == 0: + raise dns.exception.SyntaxError("NSEC with bit 0") + if nrdtype > 65535: + raise dns.exception.SyntaxError("NSEC with bit > 65535") + rdtypes.append(nrdtype) + rdtypes.sort() + window = 0 + octets = 0 + prior_rdtype = 0 + bitmap = bytearray(b'\0' * 32) + windows = [] + for nrdtype in rdtypes: + if nrdtype == prior_rdtype: + continue + prior_rdtype = nrdtype + new_window = nrdtype // 256 + if new_window != window: + windows.append((window, bitmap[0:octets])) + bitmap = bytearray(b'\0' * 32) + window = new_window + offset = nrdtype % 256 + byte = offset // 8 + bit = offset % 8 + octets = byte + 1 + bitmap[byte] = bitmap[byte] | (0x80 >> bit) + + windows.append((window, bitmap[0:octets])) + return cls(rdclass, rdtype, next, windows) + + def to_wire(self, file, compress=None, origin=None): + self.next.to_wire(file, None, origin) + for (window, bitmap) in self.windows: + file.write(struct.pack('!BB', window, len(bitmap))) + file.write(bitmap) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + (next, cused) = dns.name.from_wire(wire[: current + rdlen], current) + current += cused + rdlen -= cused + windows = [] + while rdlen > 0: + if rdlen < 3: + raise dns.exception.FormError("NSEC too short") + window = wire[current] + octets = wire[current + 1] + if octets == 0 or octets > 32: + raise dns.exception.FormError("bad NSEC octets") + current += 2 + rdlen -= 2 + if rdlen < octets: + raise dns.exception.FormError("bad NSEC bitmap length") + bitmap = bytearray(wire[current: current + octets].unwrap()) + current += octets + rdlen -= octets + windows.append((window, bitmap)) + if origin is not None: + next = next.relativize(origin) + return cls(rdclass, rdtype, next, windows) + + def choose_relativity(self, origin=None, relativize=True): + self.next = self.next.choose_relativity(origin, relativize) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/NSEC3.py 
b/openpype/vendor/python/python_2/dns/rdtypes/ANY/NSEC3.py new file mode 100644 index 0000000000..1c281c4a4d --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/NSEC3.py @@ -0,0 +1,196 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import binascii +import string +import struct + +import dns.exception +import dns.rdata +import dns.rdatatype +from dns._compat import xrange, text_type, PY3 + +# pylint: disable=deprecated-string-function +if PY3: + b32_hex_to_normal = bytes.maketrans(b'0123456789ABCDEFGHIJKLMNOPQRSTUV', + b'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567') + b32_normal_to_hex = bytes.maketrans(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567', + b'0123456789ABCDEFGHIJKLMNOPQRSTUV') +else: + b32_hex_to_normal = string.maketrans('0123456789ABCDEFGHIJKLMNOPQRSTUV', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567') + b32_normal_to_hex = string.maketrans('ABCDEFGHIJKLMNOPQRSTUVWXYZ234567', + '0123456789ABCDEFGHIJKLMNOPQRSTUV') +# pylint: enable=deprecated-string-function + + +# hash algorithm constants +SHA1 = 1 + +# flag constants +OPTOUT = 1 + + +class NSEC3(dns.rdata.Rdata): + + """NSEC3 record + + @ivar algorithm: the hash algorithm number + @type algorithm: int + @ivar flags: the flags + @type flags: int + @ivar iterations: the number of iterations + @type iterations: int + @ivar salt: the salt + @type salt: string + @ivar next: the next name hash + @type next: string + @ivar windows: the windowed bitmap list + @type windows: list of (window number, string) tuples""" + + __slots__ = ['algorithm', 'flags', 'iterations', 'salt', 'next', 'windows'] + + def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt, + next, windows): + super(NSEC3, self).__init__(rdclass, rdtype) + self.algorithm = algorithm + self.flags = flags + self.iterations = iterations + if isinstance(salt, text_type): + self.salt = salt.encode() + else: + self.salt = salt + self.next = next + self.windows = windows + + def to_text(self, origin=None, relativize=True, **kw): + next = base64.b32encode(self.next).translate( + b32_normal_to_hex).lower().decode() + if self.salt == b'': + salt = '-' + else: + salt = binascii.hexlify(self.salt).decode() + text = u'' + for (window, bitmap) in self.windows: + bits = [] + for i in xrange(0, len(bitmap)): + byte = bitmap[i] + for j in xrange(0, 8): + if byte & (0x80 >> j): + bits.append(dns.rdatatype.to_text(window * 256 + + i * 8 + j)) + text += (u' ' + u' '.join(bits)) + return u'%u %u %u %s %s%s' % (self.algorithm, self.flags, + self.iterations, salt, next, text) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + algorithm = tok.get_uint8() + flags = tok.get_uint8() + iterations = tok.get_uint16() + salt = 
tok.get_string() + if salt == u'-': + salt = b'' + else: + salt = binascii.unhexlify(salt.encode('ascii')) + next = tok.get_string().encode( + 'ascii').upper().translate(b32_hex_to_normal) + next = base64.b32decode(next) + rdtypes = [] + while 1: + token = tok.get().unescape() + if token.is_eol_or_eof(): + break + nrdtype = dns.rdatatype.from_text(token.value) + if nrdtype == 0: + raise dns.exception.SyntaxError("NSEC3 with bit 0") + if nrdtype > 65535: + raise dns.exception.SyntaxError("NSEC3 with bit > 65535") + rdtypes.append(nrdtype) + rdtypes.sort() + window = 0 + octets = 0 + prior_rdtype = 0 + bitmap = bytearray(b'\0' * 32) + windows = [] + for nrdtype in rdtypes: + if nrdtype == prior_rdtype: + continue + prior_rdtype = nrdtype + new_window = nrdtype // 256 + if new_window != window: + if octets != 0: + windows.append((window, bitmap[0:octets])) + bitmap = bytearray(b'\0' * 32) + window = new_window + offset = nrdtype % 256 + byte = offset // 8 + bit = offset % 8 + octets = byte + 1 + bitmap[byte] = bitmap[byte] | (0x80 >> bit) + if octets != 0: + windows.append((window, bitmap[0:octets])) + return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, + windows) + + def to_wire(self, file, compress=None, origin=None): + l = len(self.salt) + file.write(struct.pack("!BBHB", self.algorithm, self.flags, + self.iterations, l)) + file.write(self.salt) + l = len(self.next) + file.write(struct.pack("!B", l)) + file.write(self.next) + for (window, bitmap) in self.windows: + file.write(struct.pack("!BB", window, len(bitmap))) + file.write(bitmap) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + (algorithm, flags, iterations, slen) = \ + struct.unpack('!BBHB', wire[current: current + 5]) + + current += 5 + rdlen -= 5 + salt = wire[current: current + slen].unwrap() + current += slen + rdlen -= slen + nlen = wire[current] + current += 1 + rdlen -= 1 + next = wire[current: current + nlen].unwrap() + current += nlen + rdlen -= nlen + windows = [] + while rdlen > 0: + if rdlen < 3: + raise dns.exception.FormError("NSEC3 too short") + window = wire[current] + octets = wire[current + 1] + if octets == 0 or octets > 32: + raise dns.exception.FormError("bad NSEC3 octets") + current += 2 + rdlen -= 2 + if rdlen < octets: + raise dns.exception.FormError("bad NSEC3 bitmap length") + bitmap = bytearray(wire[current: current + octets].unwrap()) + current += octets + rdlen -= octets + windows.append((window, bitmap)) + return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, + windows) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/NSEC3PARAM.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/NSEC3PARAM.py new file mode 100644 index 0000000000..87c36e5675 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/NSEC3PARAM.py @@ -0,0 +1,90 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct +import binascii + +import dns.exception +import dns.rdata +from dns._compat import text_type + + +class NSEC3PARAM(dns.rdata.Rdata): + + """NSEC3PARAM record + + @ivar algorithm: the hash algorithm number + @type algorithm: int + @ivar flags: the flags + @type flags: int + @ivar iterations: the number of iterations + @type iterations: int + @ivar salt: the salt + @type salt: string""" + + __slots__ = ['algorithm', 'flags', 'iterations', 'salt'] + + def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt): + super(NSEC3PARAM, self).__init__(rdclass, rdtype) + self.algorithm = algorithm + self.flags = flags + self.iterations = iterations + if isinstance(salt, text_type): + self.salt = salt.encode() + else: + self.salt = salt + + def to_text(self, origin=None, relativize=True, **kw): + if self.salt == b'': + salt = '-' + else: + salt = binascii.hexlify(self.salt).decode() + return '%u %u %u %s' % (self.algorithm, self.flags, self.iterations, + salt) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + algorithm = tok.get_uint8() + flags = tok.get_uint8() + iterations = tok.get_uint16() + salt = tok.get_string() + if salt == '-': + salt = '' + else: + salt = binascii.unhexlify(salt.encode()) + tok.get_eol() + return cls(rdclass, rdtype, algorithm, flags, iterations, salt) + + def to_wire(self, file, compress=None, origin=None): + l = len(self.salt) + file.write(struct.pack("!BBHB", self.algorithm, self.flags, + self.iterations, l)) + file.write(self.salt) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + (algorithm, flags, iterations, slen) = \ + struct.unpack('!BBHB', + wire[current: current + 5]) + current += 5 + rdlen -= 5 + salt = wire[current: current + slen].unwrap() + current += slen + rdlen -= slen + if rdlen != 0: + raise dns.exception.FormError + return cls(rdclass, rdtype, algorithm, flags, iterations, salt) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/OPENPGPKEY.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/OPENPGPKEY.py new file mode 100644 index 0000000000..a066cf98df --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/OPENPGPKEY.py @@ -0,0 +1,60 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2016 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
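+# Usage sketch (illustrative, not part of the upstream module; assumes the
+# generic dns.rdata.from_text() entry point and an IN-class record): the
+# rdata stores the raw key bytes, so the text form is just their base64.
+#
+#   >>> import dns.rdata, dns.rdataclass, dns.rdatatype
+#   >>> rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.OPENPGPKEY,
+#   ...                          'YmluYXJ5IGtleSBkYXRh')
+#   >>> rd.key   # the decoded base64 payload
+#   'binary key data'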
+ +import base64 + +import dns.exception +import dns.rdata +import dns.tokenizer + +class OPENPGPKEY(dns.rdata.Rdata): + + """OPENPGPKEY record + + @ivar key: the key + @type key: bytes + @see: RFC 7929 + """ + + def __init__(self, rdclass, rdtype, key): + super(OPENPGPKEY, self).__init__(rdclass, rdtype) + self.key = key + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._base64ify(self.key) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + chunks = [] + while 1: + t = tok.get().unescape() + if t.is_eol_or_eof(): + break + if not t.is_identifier(): + raise dns.exception.SyntaxError + chunks.append(t.value.encode()) + b64 = b''.join(chunks) + key = base64.b64decode(b64) + return cls(rdclass, rdtype, key) + + def to_wire(self, file, compress=None, origin=None): + file.write(self.key) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + key = wire[current: current + rdlen].unwrap() + return cls(rdclass, rdtype, key) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/PTR.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/PTR.py new file mode 100644 index 0000000000..20cd50761d --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/PTR.py @@ -0,0 +1,23 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.nsbase + + +class PTR(dns.rdtypes.nsbase.NSBase): + + """PTR record""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/RP.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/RP.py new file mode 100644 index 0000000000..8f07be9071 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/RP.py @@ -0,0 +1,82 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
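+# Presentation-form sketch (illustrative, not part of the upstream module):
+# an RP rdata is just two domain names, a responsible mailbox and a TXT
+# pointer, so parsing and printing round-trip through the generic API:
+#
+#   >>> import dns.rdata, dns.rdataclass, dns.rdatatype
+#   >>> rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.RP,
+#   ...                          'admin.example.com. txt.example.com.')
+#   >>> rd.to_text()
+#   'admin.example.com. txt.example.com.'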
+ +import dns.exception +import dns.rdata +import dns.name + + +class RP(dns.rdata.Rdata): + + """RP record + + @ivar mbox: The responsible person's mailbox + @type mbox: dns.name.Name object + @ivar txt: The owner name of a node with TXT records, or the root name + if no TXT records are associated with this RP. + @type txt: dns.name.Name object + @see: RFC 1183""" + + __slots__ = ['mbox', 'txt'] + + def __init__(self, rdclass, rdtype, mbox, txt): + super(RP, self).__init__(rdclass, rdtype) + self.mbox = mbox + self.txt = txt + + def to_text(self, origin=None, relativize=True, **kw): + mbox = self.mbox.choose_relativity(origin, relativize) + txt = self.txt.choose_relativity(origin, relativize) + return "{} {}".format(str(mbox), str(txt)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + mbox = tok.get_name() + txt = tok.get_name() + mbox = mbox.choose_relativity(origin, relativize) + txt = txt.choose_relativity(origin, relativize) + tok.get_eol() + return cls(rdclass, rdtype, mbox, txt) + + def to_wire(self, file, compress=None, origin=None): + self.mbox.to_wire(file, None, origin) + self.txt.to_wire(file, None, origin) + + def to_digestable(self, origin=None): + return self.mbox.to_digestable(origin) + \ + self.txt.to_digestable(origin) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + (mbox, cused) = dns.name.from_wire(wire[: current + rdlen], + current) + current += cused + rdlen -= cused + if rdlen <= 0: + raise dns.exception.FormError + (txt, cused) = dns.name.from_wire(wire[: current + rdlen], + current) + if cused != rdlen: + raise dns.exception.FormError + if origin is not None: + mbox = mbox.relativize(origin) + txt = txt.relativize(origin) + return cls(rdclass, rdtype, mbox, txt) + + def choose_relativity(self, origin=None, relativize=True): + self.mbox = self.mbox.choose_relativity(origin, relativize) + self.txt = self.txt.choose_relativity(origin, relativize) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/RRSIG.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/RRSIG.py new file mode 100644 index 0000000000..d3756ece4e --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/RRSIG.py @@ -0,0 +1,158 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
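+# Timestamp sketch (illustrative, not part of the upstream module): RRSIG
+# inception and expiration times use the YYYYMMDDHHMMSS form handled by the
+# helpers below, so for example sigtime_to_posixtime('20200101000000')
+# returns 1577836800, and posixtime_to_sigtime(1577836800) returns
+# '20200101000000'.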
+ +import base64 +import calendar +import struct +import time + +import dns.dnssec +import dns.exception +import dns.rdata +import dns.rdatatype + + +class BadSigTime(dns.exception.DNSException): + + """Time in DNS SIG or RRSIG resource record cannot be parsed.""" + + +def sigtime_to_posixtime(what): + if len(what) != 14: + raise BadSigTime + year = int(what[0:4]) + month = int(what[4:6]) + day = int(what[6:8]) + hour = int(what[8:10]) + minute = int(what[10:12]) + second = int(what[12:14]) + return calendar.timegm((year, month, day, hour, minute, second, + 0, 0, 0)) + + +def posixtime_to_sigtime(what): + return time.strftime('%Y%m%d%H%M%S', time.gmtime(what)) + + +class RRSIG(dns.rdata.Rdata): + + """RRSIG record + + @ivar type_covered: the rdata type this signature covers + @type type_covered: int + @ivar algorithm: the algorithm used for the sig + @type algorithm: int + @ivar labels: number of labels + @type labels: int + @ivar original_ttl: the original TTL + @type original_ttl: long + @ivar expiration: signature expiration time + @type expiration: long + @ivar inception: signature inception time + @type inception: long + @ivar key_tag: the key tag + @type key_tag: int + @ivar signer: the signer + @type signer: dns.name.Name object + @ivar signature: the signature + @type signature: string""" + + __slots__ = ['type_covered', 'algorithm', 'labels', 'original_ttl', + 'expiration', 'inception', 'key_tag', 'signer', + 'signature'] + + def __init__(self, rdclass, rdtype, type_covered, algorithm, labels, + original_ttl, expiration, inception, key_tag, signer, + signature): + super(RRSIG, self).__init__(rdclass, rdtype) + self.type_covered = type_covered + self.algorithm = algorithm + self.labels = labels + self.original_ttl = original_ttl + self.expiration = expiration + self.inception = inception + self.key_tag = key_tag + self.signer = signer + self.signature = signature + + def covers(self): + return self.type_covered + + def to_text(self, origin=None, relativize=True, **kw): + return '%s %d %d %d %s %s %d %s %s' % ( + dns.rdatatype.to_text(self.type_covered), + self.algorithm, + self.labels, + self.original_ttl, + posixtime_to_sigtime(self.expiration), + posixtime_to_sigtime(self.inception), + self.key_tag, + self.signer.choose_relativity(origin, relativize), + dns.rdata._base64ify(self.signature) + ) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + type_covered = dns.rdatatype.from_text(tok.get_string()) + algorithm = dns.dnssec.algorithm_from_text(tok.get_string()) + labels = tok.get_int() + original_ttl = tok.get_ttl() + expiration = sigtime_to_posixtime(tok.get_string()) + inception = sigtime_to_posixtime(tok.get_string()) + key_tag = tok.get_int() + signer = tok.get_name() + signer = signer.choose_relativity(origin, relativize) + chunks = [] + while 1: + t = tok.get().unescape() + if t.is_eol_or_eof(): + break + if not t.is_identifier(): + raise dns.exception.SyntaxError + chunks.append(t.value.encode()) + b64 = b''.join(chunks) + signature = base64.b64decode(b64) + return cls(rdclass, rdtype, type_covered, algorithm, labels, + original_ttl, expiration, inception, key_tag, signer, + signature) + + def to_wire(self, file, compress=None, origin=None): + header = struct.pack('!HBBIIIH', self.type_covered, + self.algorithm, self.labels, + self.original_ttl, self.expiration, + self.inception, self.key_tag) + file.write(header) + self.signer.to_wire(file, None, origin) + file.write(self.signature) + + @classmethod + def from_wire(cls, rdclass, 
rdtype, wire, current, rdlen, origin=None): + header = struct.unpack('!HBBIIIH', wire[current: current + 18]) + current += 18 + rdlen -= 18 + (signer, cused) = dns.name.from_wire(wire[: current + rdlen], current) + current += cused + rdlen -= cused + if origin is not None: + signer = signer.relativize(origin) + signature = wire[current: current + rdlen].unwrap() + return cls(rdclass, rdtype, header[0], header[1], header[2], + header[3], header[4], header[5], header[6], signer, + signature) + + def choose_relativity(self, origin=None, relativize=True): + self.signer = self.signer.choose_relativity(origin, relativize) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/RT.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/RT.py new file mode 100644 index 0000000000..d0feb79e9d --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/RT.py @@ -0,0 +1,23 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.mxbase + + +class RT(dns.rdtypes.mxbase.UncompressedDowncasingMX): + + """RT record""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/SOA.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/SOA.py new file mode 100644 index 0000000000..aec81cad8a --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/SOA.py @@ -0,0 +1,116 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
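+# Wire-format note (illustrative, not part of the upstream module): an SOA
+# rdata is MNAME and RNAME (both compressible names) followed by exactly
+# five unsigned 32-bit integers, i.e. a fixed 20-octet tail -- which is why
+# from_wire() below rejects any record whose remaining length is not 20
+# after both names have been consumed.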
+ +import struct + +import dns.exception +import dns.rdata +import dns.name + + +class SOA(dns.rdata.Rdata): + + """SOA record + + @ivar mname: the SOA MNAME (master name) field + @type mname: dns.name.Name object + @ivar rname: the SOA RNAME (responsible name) field + @type rname: dns.name.Name object + @ivar serial: The zone's serial number + @type serial: int + @ivar refresh: The zone's refresh value (in seconds) + @type refresh: int + @ivar retry: The zone's retry value (in seconds) + @type retry: int + @ivar expire: The zone's expiration value (in seconds) + @type expire: int + @ivar minimum: The zone's negative caching time (in seconds, called + "minimum" for historical reasons) + @type minimum: int + @see: RFC 1035""" + + __slots__ = ['mname', 'rname', 'serial', 'refresh', 'retry', 'expire', + 'minimum'] + + def __init__(self, rdclass, rdtype, mname, rname, serial, refresh, retry, + expire, minimum): + super(SOA, self).__init__(rdclass, rdtype) + self.mname = mname + self.rname = rname + self.serial = serial + self.refresh = refresh + self.retry = retry + self.expire = expire + self.minimum = minimum + + def to_text(self, origin=None, relativize=True, **kw): + mname = self.mname.choose_relativity(origin, relativize) + rname = self.rname.choose_relativity(origin, relativize) + return '%s %s %d %d %d %d %d' % ( + mname, rname, self.serial, self.refresh, self.retry, + self.expire, self.minimum) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + mname = tok.get_name() + rname = tok.get_name() + mname = mname.choose_relativity(origin, relativize) + rname = rname.choose_relativity(origin, relativize) + serial = tok.get_uint32() + refresh = tok.get_ttl() + retry = tok.get_ttl() + expire = tok.get_ttl() + minimum = tok.get_ttl() + tok.get_eol() + return cls(rdclass, rdtype, mname, rname, serial, refresh, retry, + expire, minimum) + + def to_wire(self, file, compress=None, origin=None): + self.mname.to_wire(file, compress, origin) + self.rname.to_wire(file, compress, origin) + five_ints = struct.pack('!IIIII', self.serial, self.refresh, + self.retry, self.expire, self.minimum) + file.write(five_ints) + + def to_digestable(self, origin=None): + return self.mname.to_digestable(origin) + \ + self.rname.to_digestable(origin) + \ + struct.pack('!IIIII', self.serial, self.refresh, + self.retry, self.expire, self.minimum) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + (mname, cused) = dns.name.from_wire(wire[: current + rdlen], current) + current += cused + rdlen -= cused + (rname, cused) = dns.name.from_wire(wire[: current + rdlen], current) + current += cused + rdlen -= cused + if rdlen != 20: + raise dns.exception.FormError + five_ints = struct.unpack('!IIIII', + wire[current: current + rdlen]) + if origin is not None: + mname = mname.relativize(origin) + rname = rname.relativize(origin) + return cls(rdclass, rdtype, mname, rname, + five_ints[0], five_ints[1], five_ints[2], five_ints[3], + five_ints[4]) + + def choose_relativity(self, origin=None, relativize=True): + self.mname = self.mname.choose_relativity(origin, relativize) + self.rname = self.rname.choose_relativity(origin, relativize) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/SPF.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/SPF.py new file mode 100644 index 0000000000..41dee62387 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/SPF.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for 
text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.txtbase + + +class SPF(dns.rdtypes.txtbase.TXTBase): + + """SPF record + + @see: RFC 4408""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/SSHFP.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/SSHFP.py new file mode 100644 index 0000000000..c18311e906 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/SSHFP.py @@ -0,0 +1,79 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
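+# Presentation-form note (illustrative, not part of the upstream module):
+# an SSHFP rdata reads '<algorithm> <fp_type> <hex fingerprint>', e.g.
+# algorithm 1 for RSA and fp_type 1 for SHA-1; from_text() below accepts a
+# fingerprint split across several tokens and joins them before unhexlify.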
+ +import struct +import binascii + +import dns.rdata +import dns.rdatatype + + +class SSHFP(dns.rdata.Rdata): + + """SSHFP record + + @ivar algorithm: the algorithm + @type algorithm: int + @ivar fp_type: the digest type + @type fp_type: int + @ivar fingerprint: the fingerprint + @type fingerprint: string + @see: draft-ietf-secsh-dns-05.txt""" + + __slots__ = ['algorithm', 'fp_type', 'fingerprint'] + + def __init__(self, rdclass, rdtype, algorithm, fp_type, + fingerprint): + super(SSHFP, self).__init__(rdclass, rdtype) + self.algorithm = algorithm + self.fp_type = fp_type + self.fingerprint = fingerprint + + def to_text(self, origin=None, relativize=True, **kw): + return '%d %d %s' % (self.algorithm, + self.fp_type, + dns.rdata._hexify(self.fingerprint, + chunksize=128)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + algorithm = tok.get_uint8() + fp_type = tok.get_uint8() + chunks = [] + while 1: + t = tok.get().unescape() + if t.is_eol_or_eof(): + break + if not t.is_identifier(): + raise dns.exception.SyntaxError + chunks.append(t.value.encode()) + fingerprint = b''.join(chunks) + fingerprint = binascii.unhexlify(fingerprint) + return cls(rdclass, rdtype, algorithm, fp_type, fingerprint) + + def to_wire(self, file, compress=None, origin=None): + header = struct.pack("!BB", self.algorithm, self.fp_type) + file.write(header) + file.write(self.fingerprint) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + header = struct.unpack("!BB", wire[current: current + 2]) + current += 2 + rdlen -= 2 + fingerprint = wire[current: current + rdlen].unwrap() + return cls(rdclass, rdtype, header[0], header[1], fingerprint) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/TLSA.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/TLSA.py new file mode 100644 index 0000000000..a135c2b3da --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/TLSA.py @@ -0,0 +1,84 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
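+# Field note (illustrative, not part of the upstream module): a TLSA rdata
+# reads '<usage> <selector> <mtype> <hex cert data>'; for example, a first
+# field of 3 is DANE-EE usage, a selector of 1 is SubjectPublicKeyInfo, and
+# an mtype of 1 is SHA-256 matching (RFC 6698). Parsing mirrors SSHFP above:
+# the hex tokens are joined, then unhexlified.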
+ +import struct +import binascii + +import dns.rdata +import dns.rdatatype + + +class TLSA(dns.rdata.Rdata): + + """TLSA record + + @ivar usage: The certificate usage + @type usage: int + @ivar selector: The selector field + @type selector: int + @ivar mtype: The 'matching type' field + @type mtype: int + @ivar cert: The 'Certificate Association Data' field + @type cert: string + @see: RFC 6698""" + + __slots__ = ['usage', 'selector', 'mtype', 'cert'] + + def __init__(self, rdclass, rdtype, usage, selector, + mtype, cert): + super(TLSA, self).__init__(rdclass, rdtype) + self.usage = usage + self.selector = selector + self.mtype = mtype + self.cert = cert + + def to_text(self, origin=None, relativize=True, **kw): + return '%d %d %d %s' % (self.usage, + self.selector, + self.mtype, + dns.rdata._hexify(self.cert, + chunksize=128)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + usage = tok.get_uint8() + selector = tok.get_uint8() + mtype = tok.get_uint8() + cert_chunks = [] + while 1: + t = tok.get().unescape() + if t.is_eol_or_eof(): + break + if not t.is_identifier(): + raise dns.exception.SyntaxError + cert_chunks.append(t.value.encode()) + cert = b''.join(cert_chunks) + cert = binascii.unhexlify(cert) + return cls(rdclass, rdtype, usage, selector, mtype, cert) + + def to_wire(self, file, compress=None, origin=None): + header = struct.pack("!BBB", self.usage, self.selector, self.mtype) + file.write(header) + file.write(self.cert) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + header = struct.unpack("!BBB", wire[current: current + 3]) + current += 3 + rdlen -= 3 + cert = wire[current: current + rdlen].unwrap() + return cls(rdclass, rdtype, header[0], header[1], header[2], cert) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/TXT.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/TXT.py new file mode 100644 index 0000000000..c5ae919c5e --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/TXT.py @@ -0,0 +1,23 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.txtbase + + +class TXT(dns.rdtypes.txtbase.TXTBase): + + """TXT record""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/URI.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/URI.py new file mode 100644 index 0000000000..f5b65ed6a9 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/URI.py @@ -0,0 +1,82 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# Copyright (C) 2015 Red Hat, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.rdata +import dns.name +from dns._compat import text_type + + +class URI(dns.rdata.Rdata): + + """URI record + + @ivar priority: the priority + @type priority: int + @ivar weight: the weight + @type weight: int + @ivar target: the target host + @type target: dns.name.Name object + @see: draft-faltstrom-uri-13""" + + __slots__ = ['priority', 'weight', 'target'] + + def __init__(self, rdclass, rdtype, priority, weight, target): + super(URI, self).__init__(rdclass, rdtype) + self.priority = priority + self.weight = weight + if len(target) < 1: + raise dns.exception.SyntaxError("URI target cannot be empty") + if isinstance(target, text_type): + self.target = target.encode() + else: + self.target = target + + def to_text(self, origin=None, relativize=True, **kw): + return '%d %d "%s"' % (self.priority, self.weight, + self.target.decode()) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + priority = tok.get_uint16() + weight = tok.get_uint16() + target = tok.get().unescape() + if not (target.is_quoted_string() or target.is_identifier()): + raise dns.exception.SyntaxError("URI target must be a string") + tok.get_eol() + return cls(rdclass, rdtype, priority, weight, target.value) + + def to_wire(self, file, compress=None, origin=None): + two_ints = struct.pack("!HH", self.priority, self.weight) + file.write(two_ints) + file.write(self.target) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + if rdlen < 5: + raise dns.exception.FormError('URI RR is shorter than 5 octets') + + (priority, weight) = struct.unpack('!HH', wire[current: current + 4]) + current += 4 + rdlen -= 4 + target = wire[current: current + rdlen] + current += rdlen + + return cls(rdclass, rdtype, priority, weight, target) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/X25.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/X25.py new file mode 100644 index 0000000000..e530a2c2a6 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/X25.py @@ -0,0 +1,66 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.rdata +import dns.tokenizer +from dns._compat import text_type + + +class X25(dns.rdata.Rdata): + + """X25 record + + @ivar address: the PSDN address + @type address: string + @see: RFC 1183""" + + __slots__ = ['address'] + + def __init__(self, rdclass, rdtype, address): + super(X25, self).__init__(rdclass, rdtype) + if isinstance(address, text_type): + self.address = address.encode() + else: + self.address = address + + def to_text(self, origin=None, relativize=True, **kw): + return '"%s"' % dns.rdata._escapify(self.address) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + address = tok.get_string() + tok.get_eol() + return cls(rdclass, rdtype, address) + + def to_wire(self, file, compress=None, origin=None): + l = len(self.address) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(self.address) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + l = wire[current] + current += 1 + rdlen -= 1 + if l != rdlen: + raise dns.exception.FormError + address = wire[current: current + l].unwrap() + return cls(rdclass, rdtype, address) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/ANY/__init__.py b/openpype/vendor/python/python_2/dns/rdtypes/ANY/__init__.py new file mode 100644 index 0000000000..ca41ef8055 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/ANY/__init__.py @@ -0,0 +1,57 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Class ANY (generic) rdata type classes.""" + +__all__ = [ + 'AFSDB', + 'AVC', + 'CAA', + 'CDNSKEY', + 'CDS', + 'CERT', + 'CNAME', + 'CSYNC', + 'DLV', + 'DNAME', + 'DNSKEY', + 'DS', + 'EUI48', + 'EUI64', + 'GPOS', + 'HINFO', + 'HIP', + 'ISDN', + 'LOC', + 'MX', + 'NS', + 'NSEC', + 'NSEC3', + 'NSEC3PARAM', + 'OPENPGPKEY', + 'PTR', + 'RP', + 'RRSIG', + 'RT', + 'SOA', + 'SPF', + 'SSHFP', + 'TLSA', + 'TXT', + 'URI', + 'X25', +] diff --git a/openpype/vendor/python/python_2/dns/rdtypes/CH/A.py b/openpype/vendor/python/python_2/dns/rdtypes/CH/A.py new file mode 100644 index 0000000000..e65d192d82 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/CH/A.py @@ -0,0 +1,70 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.mxbase +import struct + +class A(dns.rdtypes.mxbase.MXBase): + + """A record for Chaosnet + @ivar domain: the domain of the address + @type domain: dns.name.Name object + @ivar address: the 16-bit address + @type address: int""" + + __slots__ = ['domain', 'address'] + + def __init__(self, rdclass, rdtype, address, domain): + super(A, self).__init__(rdclass, rdtype, address, domain) + self.domain = domain + self.address = address + + def to_text(self, origin=None, relativize=True, **kw): + domain = self.domain.choose_relativity(origin, relativize) + return '%s %o' % (domain, self.address) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + domain = tok.get_name() + address = tok.get_uint16(base=8) + domain = domain.choose_relativity(origin, relativize) + tok.get_eol() + return cls(rdclass, rdtype, address, domain) + + def to_wire(self, file, compress=None, origin=None): + self.domain.to_wire(file, compress, origin) + pref = struct.pack("!H", self.address) + file.write(pref) + + def to_digestable(self, origin=None): + return self.domain.to_digestable(origin) + \ + struct.pack("!H", self.address) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + (domain, cused) = dns.name.from_wire(wire[: current + rdlen-2], + current) + current += cused + (address,) = struct.unpack('!H', wire[current: current + 2]) + if cused+2 != rdlen: + raise dns.exception.FormError + if origin is not None: + domain = domain.relativize(origin) + return cls(rdclass, rdtype, address, domain) + + def choose_relativity(self, origin=None, relativize=True): + self.domain = self.domain.choose_relativity(origin, relativize) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/CH/__init__.py b/openpype/vendor/python/python_2/dns/rdtypes/CH/__init__.py new file mode 100644 index 0000000000..7184a7332a --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/CH/__init__.py @@ -0,0 +1,22 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Class CH rdata type classes.""" + +__all__ = [ + 'A', +] diff --git a/openpype/vendor/python/python_2/dns/rdtypes/IN/A.py b/openpype/vendor/python/python_2/dns/rdtypes/IN/A.py new file mode 100644 index 0000000000..8998982462 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/IN/A.py @@ -0,0 +1,54 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.ipv4 +import dns.rdata +import dns.tokenizer + + +class A(dns.rdata.Rdata): + + """A record. + + @ivar address: an IPv4 address + @type address: string (in the standard "dotted quad" format)""" + + __slots__ = ['address'] + + def __init__(self, rdclass, rdtype, address): + super(A, self).__init__(rdclass, rdtype) + # check that it's OK + dns.ipv4.inet_aton(address) + self.address = address + + def to_text(self, origin=None, relativize=True, **kw): + return self.address + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + address = tok.get_identifier() + tok.get_eol() + return cls(rdclass, rdtype, address) + + def to_wire(self, file, compress=None, origin=None): + file.write(dns.ipv4.inet_aton(self.address)) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + address = dns.ipv4.inet_ntoa(wire[current: current + rdlen]) + return cls(rdclass, rdtype, address) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/IN/AAAA.py b/openpype/vendor/python/python_2/dns/rdtypes/IN/AAAA.py new file mode 100644 index 0000000000..a77c5bf2a5 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/IN/AAAA.py @@ -0,0 +1,55 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.inet +import dns.rdata +import dns.tokenizer + + +class AAAA(dns.rdata.Rdata): + + """AAAA record. + + @ivar address: an IPv6 address + @type address: string (in the standard IPv6 format)""" + + __slots__ = ['address'] + + def __init__(self, rdclass, rdtype, address): + super(AAAA, self).__init__(rdclass, rdtype) + # check that it's OK + dns.inet.inet_pton(dns.inet.AF_INET6, address) + self.address = address + + def to_text(self, origin=None, relativize=True, **kw): + return self.address + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + address = tok.get_identifier() + tok.get_eol() + return cls(rdclass, rdtype, address) + + def to_wire(self, file, compress=None, origin=None): + file.write(dns.inet.inet_pton(dns.inet.AF_INET6, self.address)) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + address = dns.inet.inet_ntop(dns.inet.AF_INET6, + wire[current: current + rdlen]) + return cls(rdclass, rdtype, address) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/IN/APL.py b/openpype/vendor/python/python_2/dns/rdtypes/IN/APL.py new file mode 100644 index 0000000000..48faf88ab7 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/IN/APL.py @@ -0,0 +1,165 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii +import codecs +import struct + +import dns.exception +import dns.inet +import dns.rdata +import dns.tokenizer +from dns._compat import xrange, maybe_chr + + +class APLItem(object): + + """An APL list item. + + @ivar family: the address family (IANA address family registry) + @type family: int + @ivar negation: is this item negated? 
+ @type negation: bool + @ivar address: the address + @type address: string + @ivar prefix: the prefix length + @type prefix: int + """ + + __slots__ = ['family', 'negation', 'address', 'prefix'] + + def __init__(self, family, negation, address, prefix): + self.family = family + self.negation = negation + self.address = address + self.prefix = prefix + + def __str__(self): + if self.negation: + return "!%d:%s/%s" % (self.family, self.address, self.prefix) + else: + return "%d:%s/%s" % (self.family, self.address, self.prefix) + + def to_wire(self, file): + if self.family == 1: + address = dns.inet.inet_pton(dns.inet.AF_INET, self.address) + elif self.family == 2: + address = dns.inet.inet_pton(dns.inet.AF_INET6, self.address) + else: + address = binascii.unhexlify(self.address) + # + # Truncate least significant zero bytes. + # + last = 0 + for i in xrange(len(address) - 1, -1, -1): + if address[i] != maybe_chr(0): + last = i + 1 + break + address = address[0: last] + l = len(address) + assert l < 128 + if self.negation: + l |= 0x80 + header = struct.pack('!HBB', self.family, self.prefix, l) + file.write(header) + file.write(address) + + +class APL(dns.rdata.Rdata): + + """APL record. + + @ivar items: a list of APL items + @type items: list of APL_Item + @see: RFC 3123""" + + __slots__ = ['items'] + + def __init__(self, rdclass, rdtype, items): + super(APL, self).__init__(rdclass, rdtype) + self.items = items + + def to_text(self, origin=None, relativize=True, **kw): + return ' '.join(map(str, self.items)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + items = [] + while 1: + token = tok.get().unescape() + if token.is_eol_or_eof(): + break + item = token.value + if item[0] == '!': + negation = True + item = item[1:] + else: + negation = False + (family, rest) = item.split(':', 1) + family = int(family) + (address, prefix) = rest.split('/', 1) + prefix = int(prefix) + item = APLItem(family, negation, address, prefix) + items.append(item) + + return cls(rdclass, rdtype, items) + + def to_wire(self, file, compress=None, origin=None): + for item in self.items: + item.to_wire(file) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + + items = [] + while 1: + if rdlen == 0: + break + if rdlen < 4: + raise dns.exception.FormError + header = struct.unpack('!HBB', wire[current: current + 4]) + afdlen = header[2] + if afdlen > 127: + negation = True + afdlen -= 128 + else: + negation = False + current += 4 + rdlen -= 4 + if rdlen < afdlen: + raise dns.exception.FormError + address = wire[current: current + afdlen].unwrap() + l = len(address) + if header[0] == 1: + if l < 4: + address += b'\x00' * (4 - l) + address = dns.inet.inet_ntop(dns.inet.AF_INET, address) + elif header[0] == 2: + if l < 16: + address += b'\x00' * (16 - l) + address = dns.inet.inet_ntop(dns.inet.AF_INET6, address) + else: + # + # This isn't really right according to the RFC, but it + # seems better than throwing an exception + # + address = codecs.encode(address, 'hex_codec') + current += afdlen + rdlen -= afdlen + item = APLItem(header[0], negation, address, header[1]) + items.append(item) + return cls(rdclass, rdtype, items) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/IN/DHCID.py b/openpype/vendor/python/python_2/dns/rdtypes/IN/DHCID.py new file mode 100644 index 0000000000..cec64590f0 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/IN/DHCID.py @@ -0,0 +1,61 @@ +# Copyright (C) Dnspython Contributors, see LICENSE 
for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 + +import dns.exception + + +class DHCID(dns.rdata.Rdata): + + """DHCID record + + @ivar data: the data (the content of the RR is opaque as far as the + DNS is concerned) + @type data: string + @see: RFC 4701""" + + __slots__ = ['data'] + + def __init__(self, rdclass, rdtype, data): + super(DHCID, self).__init__(rdclass, rdtype) + self.data = data + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._base64ify(self.data) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + chunks = [] + while 1: + t = tok.get().unescape() + if t.is_eol_or_eof(): + break + if not t.is_identifier(): + raise dns.exception.SyntaxError + chunks.append(t.value.encode()) + b64 = b''.join(chunks) + data = base64.b64decode(b64) + return cls(rdclass, rdtype, data) + + def to_wire(self, file, compress=None, origin=None): + file.write(self.data) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + data = wire[current: current + rdlen].unwrap() + return cls(rdclass, rdtype, data) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/IN/IPSECKEY.py b/openpype/vendor/python/python_2/dns/rdtypes/IN/IPSECKEY.py new file mode 100644 index 0000000000..8f49ba137d --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/IN/IPSECKEY.py @@ -0,0 +1,150 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
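+
+# Editor's note -- an illustrative sketch, not part of the vendored upstream
+# file. The IPSECKEY rdata defined below encodes its gateway in one of four
+# ways, selected by gateway_type: 0 = no gateway, 1 = IPv4, 2 = IPv6,
+# 3 = domain name. Assuming this vendored `dns` package is importable, a
+# record in presentation format (values taken from the RFC 4025 example)
+# can be parsed through the generic entry point:
+#
+#   import dns.rdata, dns.rdataclass, dns.rdatatype
+#   rd = dns.rdata.from_text(
+#       dns.rdataclass.IN, dns.rdatatype.IPSECKEY,
+#       '10 1 2 192.0.2.38 AQNRU3mG7TVTO2BkR47usntb102uFJtugbo6BSGvgqt4AQ==')
+#   assert rd.gateway_type == 1 and rd.gateway == '192.0.2.38'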
+ +import struct +import base64 + +import dns.exception +import dns.inet +import dns.name + + +class IPSECKEY(dns.rdata.Rdata): + + """IPSECKEY record + + @ivar precedence: the precedence for this key data + @type precedence: int + @ivar gateway_type: the gateway type + @type gateway_type: int + @ivar algorithm: the algorithm to use + @type algorithm: int + @ivar gateway: the public key + @type gateway: None, IPv4 address, IPV6 address, or domain name + @ivar key: the public key + @type key: string + @see: RFC 4025""" + + __slots__ = ['precedence', 'gateway_type', 'algorithm', 'gateway', 'key'] + + def __init__(self, rdclass, rdtype, precedence, gateway_type, algorithm, + gateway, key): + super(IPSECKEY, self).__init__(rdclass, rdtype) + if gateway_type == 0: + if gateway != '.' and gateway is not None: + raise SyntaxError('invalid gateway for gateway type 0') + gateway = None + elif gateway_type == 1: + # check that it's OK + dns.inet.inet_pton(dns.inet.AF_INET, gateway) + elif gateway_type == 2: + # check that it's OK + dns.inet.inet_pton(dns.inet.AF_INET6, gateway) + elif gateway_type == 3: + pass + else: + raise SyntaxError( + 'invalid IPSECKEY gateway type: %d' % gateway_type) + self.precedence = precedence + self.gateway_type = gateway_type + self.algorithm = algorithm + self.gateway = gateway + self.key = key + + def to_text(self, origin=None, relativize=True, **kw): + if self.gateway_type == 0: + gateway = '.' + elif self.gateway_type == 1: + gateway = self.gateway + elif self.gateway_type == 2: + gateway = self.gateway + elif self.gateway_type == 3: + gateway = str(self.gateway.choose_relativity(origin, relativize)) + else: + raise ValueError('invalid gateway type') + return '%d %d %d %s %s' % (self.precedence, self.gateway_type, + self.algorithm, gateway, + dns.rdata._base64ify(self.key)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + precedence = tok.get_uint8() + gateway_type = tok.get_uint8() + algorithm = tok.get_uint8() + if gateway_type == 3: + gateway = tok.get_name().choose_relativity(origin, relativize) + else: + gateway = tok.get_string() + chunks = [] + while 1: + t = tok.get().unescape() + if t.is_eol_or_eof(): + break + if not t.is_identifier(): + raise dns.exception.SyntaxError + chunks.append(t.value.encode()) + b64 = b''.join(chunks) + key = base64.b64decode(b64) + return cls(rdclass, rdtype, precedence, gateway_type, algorithm, + gateway, key) + + def to_wire(self, file, compress=None, origin=None): + header = struct.pack("!BBB", self.precedence, self.gateway_type, + self.algorithm) + file.write(header) + if self.gateway_type == 0: + pass + elif self.gateway_type == 1: + file.write(dns.inet.inet_pton(dns.inet.AF_INET, self.gateway)) + elif self.gateway_type == 2: + file.write(dns.inet.inet_pton(dns.inet.AF_INET6, self.gateway)) + elif self.gateway_type == 3: + self.gateway.to_wire(file, None, origin) + else: + raise ValueError('invalid gateway type') + file.write(self.key) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + if rdlen < 3: + raise dns.exception.FormError + header = struct.unpack('!BBB', wire[current: current + 3]) + gateway_type = header[1] + current += 3 + rdlen -= 3 + if gateway_type == 0: + gateway = None + elif gateway_type == 1: + gateway = dns.inet.inet_ntop(dns.inet.AF_INET, + wire[current: current + 4]) + current += 4 + rdlen -= 4 + elif gateway_type == 2: + gateway = dns.inet.inet_ntop(dns.inet.AF_INET6, + wire[current: current + 16]) + current += 16 + 
rdlen -= 16 + elif gateway_type == 3: + (gateway, cused) = dns.name.from_wire(wire[: current + rdlen], + current) + current += cused + rdlen -= cused + else: + raise dns.exception.FormError('invalid IPSECKEY gateway type') + key = wire[current: current + rdlen].unwrap() + return cls(rdclass, rdtype, header[0], gateway_type, header[2], + gateway, key) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/IN/KX.py b/openpype/vendor/python/python_2/dns/rdtypes/IN/KX.py new file mode 100644 index 0000000000..1318a582e7 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/IN/KX.py @@ -0,0 +1,23 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.rdtypes.mxbase + + +class KX(dns.rdtypes.mxbase.UncompressedMX): + + """KX record""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/IN/NAPTR.py b/openpype/vendor/python/python_2/dns/rdtypes/IN/NAPTR.py new file mode 100644 index 0000000000..32fa4745ea --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/IN/NAPTR.py @@ -0,0 +1,127 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
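+
+# Editor's note -- an illustrative sketch, not part of the vendored upstream
+# file. The NAPTR rdata below carries order and preference values plus three
+# quoted strings (flags, service, regexp) and a replacement name (RFC 3403).
+# Assuming this vendored `dns` package is importable, a typical ENUM-style
+# record (invented values) round-trips like this:
+#
+#   import dns.rdata, dns.rdataclass, dns.rdatatype
+#   rd = dns.rdata.from_text(
+#       dns.rdataclass.IN, dns.rdatatype.NAPTR,
+#       '100 10 "u" "E2U+sip" "!^.*$!sip:info@example.com!" .')
+#   print(rd.to_text())  # 100 10 "u" "E2U+sip" "!^.*$!sip:info@example.com!" .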
+ +import struct + +import dns.exception +import dns.name +import dns.rdata +from dns._compat import xrange, text_type + + +def _write_string(file, s): + l = len(s) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(s) + + +def _sanitize(value): + if isinstance(value, text_type): + return value.encode() + return value + + +class NAPTR(dns.rdata.Rdata): + + """NAPTR record + + @ivar order: order + @type order: int + @ivar preference: preference + @type preference: int + @ivar flags: flags + @type flags: string + @ivar service: service + @type service: string + @ivar regexp: regular expression + @type regexp: string + @ivar replacement: replacement name + @type replacement: dns.name.Name object + @see: RFC 3403""" + + __slots__ = ['order', 'preference', 'flags', 'service', 'regexp', + 'replacement'] + + def __init__(self, rdclass, rdtype, order, preference, flags, service, + regexp, replacement): + super(NAPTR, self).__init__(rdclass, rdtype) + self.flags = _sanitize(flags) + self.service = _sanitize(service) + self.regexp = _sanitize(regexp) + self.order = order + self.preference = preference + self.replacement = replacement + + def to_text(self, origin=None, relativize=True, **kw): + replacement = self.replacement.choose_relativity(origin, relativize) + return '%d %d "%s" "%s" "%s" %s' % \ + (self.order, self.preference, + dns.rdata._escapify(self.flags), + dns.rdata._escapify(self.service), + dns.rdata._escapify(self.regexp), + replacement) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + order = tok.get_uint16() + preference = tok.get_uint16() + flags = tok.get_string() + service = tok.get_string() + regexp = tok.get_string() + replacement = tok.get_name() + replacement = replacement.choose_relativity(origin, relativize) + tok.get_eol() + return cls(rdclass, rdtype, order, preference, flags, service, + regexp, replacement) + + def to_wire(self, file, compress=None, origin=None): + two_ints = struct.pack("!HH", self.order, self.preference) + file.write(two_ints) + _write_string(file, self.flags) + _write_string(file, self.service) + _write_string(file, self.regexp) + self.replacement.to_wire(file, compress, origin) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + (order, preference) = struct.unpack('!HH', wire[current: current + 4]) + current += 4 + rdlen -= 4 + strings = [] + for i in xrange(3): + l = wire[current] + current += 1 + rdlen -= 1 + if l > rdlen or rdlen < 0: + raise dns.exception.FormError + s = wire[current: current + l].unwrap() + current += l + rdlen -= l + strings.append(s) + (replacement, cused) = dns.name.from_wire(wire[: current + rdlen], + current) + if cused != rdlen: + raise dns.exception.FormError + if origin is not None: + replacement = replacement.relativize(origin) + return cls(rdclass, rdtype, order, preference, strings[0], strings[1], + strings[2], replacement) + + def choose_relativity(self, origin=None, relativize=True): + self.replacement = self.replacement.choose_relativity(origin, + relativize) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/IN/NSAP.py b/openpype/vendor/python/python_2/dns/rdtypes/IN/NSAP.py new file mode 100644 index 0000000000..336befc7f2 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/IN/NSAP.py @@ -0,0 +1,60 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+import binascii
+
+import dns.exception
+import dns.rdata
+import dns.tokenizer
+
+
+class NSAP(dns.rdata.Rdata):
+
+    """NSAP record.
+
+    @ivar address: an NSAP address
+    @type address: string
+    @see: RFC 1706"""
+
+    __slots__ = ['address']
+
+    def __init__(self, rdclass, rdtype, address):
+        super(NSAP, self).__init__(rdclass, rdtype)
+        self.address = address
+
+    def to_text(self, origin=None, relativize=True, **kw):
+        return "0x%s" % binascii.hexlify(self.address).decode()
+
+    @classmethod
+    def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
+        address = tok.get_string()
+        tok.get_eol()
+        if address[0:2] != '0x':
+            raise dns.exception.SyntaxError('string does not start with 0x')
+        address = address[2:].replace('.', '')
+        if len(address) % 2 != 0:
+            raise dns.exception.SyntaxError('hexstring has odd length')
+        address = binascii.unhexlify(address.encode())
+        return cls(rdclass, rdtype, address)
+
+    def to_wire(self, file, compress=None, origin=None):
+        file.write(self.address)
+
+    @classmethod
+    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
+        address = wire[current: current + rdlen].unwrap()
+        return cls(rdclass, rdtype, address)
diff --git a/openpype/vendor/python/python_2/dns/rdtypes/IN/NSAP_PTR.py b/openpype/vendor/python/python_2/dns/rdtypes/IN/NSAP_PTR.py
new file mode 100644
index 0000000000..a5b66c803f
--- /dev/null
+++ b/openpype/vendor/python/python_2/dns/rdtypes/IN/NSAP_PTR.py
@@ -0,0 +1,23 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
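+
+# Editor's note -- an illustrative sketch, not part of the vendored upstream
+# files. NSAP-PTR (below) is an NS-like record whose target name is never
+# compressed, while its sibling NSAP (previous hunk) stores a raw binary
+# address written as 0x-prefixed hex with optional dot separators. Assuming
+# this vendored `dns` package is importable (address from RFC 1706):
+#
+#   import dns.rdata, dns.rdataclass, dns.rdatatype
+#   rd = dns.rdata.from_text(
+#       dns.rdataclass.IN, dns.rdatatype.NSAP,
+#       '0x47.0005.80.005a00.0000.0001.e133.ffffff000162.00')
+#   print(rd.to_text())  # 0x47000580005a0000000001e133ffffff00016200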
+ +import dns.rdtypes.nsbase + + +class NSAP_PTR(dns.rdtypes.nsbase.UncompressedNS): + + """NSAP-PTR record""" diff --git a/openpype/vendor/python/python_2/dns/rdtypes/IN/PX.py b/openpype/vendor/python/python_2/dns/rdtypes/IN/PX.py new file mode 100644 index 0000000000..2dbaee6ce8 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/IN/PX.py @@ -0,0 +1,89 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.rdata +import dns.name + + +class PX(dns.rdata.Rdata): + + """PX record. + + @ivar preference: the preference value + @type preference: int + @ivar map822: the map822 name + @type map822: dns.name.Name object + @ivar mapx400: the mapx400 name + @type mapx400: dns.name.Name object + @see: RFC 2163""" + + __slots__ = ['preference', 'map822', 'mapx400'] + + def __init__(self, rdclass, rdtype, preference, map822, mapx400): + super(PX, self).__init__(rdclass, rdtype) + self.preference = preference + self.map822 = map822 + self.mapx400 = mapx400 + + def to_text(self, origin=None, relativize=True, **kw): + map822 = self.map822.choose_relativity(origin, relativize) + mapx400 = self.mapx400.choose_relativity(origin, relativize) + return '%d %s %s' % (self.preference, map822, mapx400) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + preference = tok.get_uint16() + map822 = tok.get_name() + map822 = map822.choose_relativity(origin, relativize) + mapx400 = tok.get_name(None) + mapx400 = mapx400.choose_relativity(origin, relativize) + tok.get_eol() + return cls(rdclass, rdtype, preference, map822, mapx400) + + def to_wire(self, file, compress=None, origin=None): + pref = struct.pack("!H", self.preference) + file.write(pref) + self.map822.to_wire(file, None, origin) + self.mapx400.to_wire(file, None, origin) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + (preference, ) = struct.unpack('!H', wire[current: current + 2]) + current += 2 + rdlen -= 2 + (map822, cused) = dns.name.from_wire(wire[: current + rdlen], + current) + if cused > rdlen: + raise dns.exception.FormError + current += cused + rdlen -= cused + if origin is not None: + map822 = map822.relativize(origin) + (mapx400, cused) = dns.name.from_wire(wire[: current + rdlen], + current) + if cused != rdlen: + raise dns.exception.FormError + if origin is not None: + mapx400 = mapx400.relativize(origin) + return cls(rdclass, rdtype, preference, map822, mapx400) + + def choose_relativity(self, origin=None, relativize=True): + self.map822 = self.map822.choose_relativity(origin, relativize) + self.mapx400 = self.mapx400.choose_relativity(origin, relativize) diff --git 
a/openpype/vendor/python/python_2/dns/rdtypes/IN/SRV.py b/openpype/vendor/python/python_2/dns/rdtypes/IN/SRV.py new file mode 100644 index 0000000000..b2c1bc9f0b --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/IN/SRV.py @@ -0,0 +1,83 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.rdata +import dns.name + + +class SRV(dns.rdata.Rdata): + + """SRV record + + @ivar priority: the priority + @type priority: int + @ivar weight: the weight + @type weight: int + @ivar port: the port of the service + @type port: int + @ivar target: the target host + @type target: dns.name.Name object + @see: RFC 2782""" + + __slots__ = ['priority', 'weight', 'port', 'target'] + + def __init__(self, rdclass, rdtype, priority, weight, port, target): + super(SRV, self).__init__(rdclass, rdtype) + self.priority = priority + self.weight = weight + self.port = port + self.target = target + + def to_text(self, origin=None, relativize=True, **kw): + target = self.target.choose_relativity(origin, relativize) + return '%d %d %d %s' % (self.priority, self.weight, self.port, + target) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + priority = tok.get_uint16() + weight = tok.get_uint16() + port = tok.get_uint16() + target = tok.get_name(None) + target = target.choose_relativity(origin, relativize) + tok.get_eol() + return cls(rdclass, rdtype, priority, weight, port, target) + + def to_wire(self, file, compress=None, origin=None): + three_ints = struct.pack("!HHH", self.priority, self.weight, self.port) + file.write(three_ints) + self.target.to_wire(file, compress, origin) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + (priority, weight, port) = struct.unpack('!HHH', + wire[current: current + 6]) + current += 6 + rdlen -= 6 + (target, cused) = dns.name.from_wire(wire[: current + rdlen], + current) + if cused != rdlen: + raise dns.exception.FormError + if origin is not None: + target = target.relativize(origin) + return cls(rdclass, rdtype, priority, weight, port, target) + + def choose_relativity(self, origin=None, relativize=True): + self.target = self.target.choose_relativity(origin, relativize) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/IN/WKS.py b/openpype/vendor/python/python_2/dns/rdtypes/IN/WKS.py new file mode 100644 index 0000000000..96f98ada70 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/IN/WKS.py @@ -0,0 +1,107 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import socket +import struct + +import dns.ipv4 +import dns.rdata +from dns._compat import xrange + +_proto_tcp = socket.getprotobyname('tcp') +_proto_udp = socket.getprotobyname('udp') + + +class WKS(dns.rdata.Rdata): + + """WKS record + + @ivar address: the address + @type address: string + @ivar protocol: the protocol + @type protocol: int + @ivar bitmap: the bitmap + @type bitmap: string + @see: RFC 1035""" + + __slots__ = ['address', 'protocol', 'bitmap'] + + def __init__(self, rdclass, rdtype, address, protocol, bitmap): + super(WKS, self).__init__(rdclass, rdtype) + self.address = address + self.protocol = protocol + if not isinstance(bitmap, bytearray): + self.bitmap = bytearray(bitmap) + else: + self.bitmap = bitmap + + def to_text(self, origin=None, relativize=True, **kw): + bits = [] + for i in xrange(0, len(self.bitmap)): + byte = self.bitmap[i] + for j in xrange(0, 8): + if byte & (0x80 >> j): + bits.append(str(i * 8 + j)) + text = ' '.join(bits) + return '%s %d %s' % (self.address, self.protocol, text) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + address = tok.get_string() + protocol = tok.get_string() + if protocol.isdigit(): + protocol = int(protocol) + else: + protocol = socket.getprotobyname(protocol) + bitmap = bytearray() + while 1: + token = tok.get().unescape() + if token.is_eol_or_eof(): + break + if token.value.isdigit(): + serv = int(token.value) + else: + if protocol != _proto_udp and protocol != _proto_tcp: + raise NotImplementedError("protocol must be TCP or UDP") + if protocol == _proto_udp: + protocol_text = "udp" + else: + protocol_text = "tcp" + serv = socket.getservbyname(token.value, protocol_text) + i = serv // 8 + l = len(bitmap) + if l < i + 1: + for j in xrange(l, i + 1): + bitmap.append(0) + bitmap[i] = bitmap[i] | (0x80 >> (serv % 8)) + bitmap = dns.rdata._truncate_bitmap(bitmap) + return cls(rdclass, rdtype, address, protocol, bitmap) + + def to_wire(self, file, compress=None, origin=None): + file.write(dns.ipv4.inet_aton(self.address)) + protocol = struct.pack('!B', self.protocol) + file.write(protocol) + file.write(self.bitmap) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + address = dns.ipv4.inet_ntoa(wire[current: current + 4]) + protocol, = struct.unpack('!B', wire[current + 4: current + 5]) + current += 5 + rdlen -= 5 + bitmap = wire[current: current + rdlen].unwrap() + return cls(rdclass, rdtype, address, protocol, bitmap) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/IN/__init__.py b/openpype/vendor/python/python_2/dns/rdtypes/IN/__init__.py new file mode 100644 index 0000000000..d7e69c9f60 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/IN/__init__.py @@ -0,0 +1,33 @@ +# Copyright (C) 
Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Class IN rdata type classes.""" + +__all__ = [ + 'A', + 'AAAA', + 'APL', + 'DHCID', + 'IPSECKEY', + 'KX', + 'NAPTR', + 'NSAP', + 'NSAP_PTR', + 'PX', + 'SRV', + 'WKS', +] diff --git a/openpype/vendor/python/python_2/dns/rdtypes/__init__.py b/openpype/vendor/python/python_2/dns/rdtypes/__init__.py new file mode 100644 index 0000000000..1ac137f1fe --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/__init__.py @@ -0,0 +1,27 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS rdata type classes""" + +__all__ = [ + 'ANY', + 'IN', + 'CH', + 'euibase', + 'mxbase', + 'nsbase', +] diff --git a/openpype/vendor/python/python_2/dns/rdtypes/dnskeybase.py b/openpype/vendor/python/python_2/dns/rdtypes/dnskeybase.py new file mode 100644 index 0000000000..3e7e87ef15 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/dnskeybase.py @@ -0,0 +1,138 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
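+
+# Editor's note -- an illustrative sketch, not part of the vendored upstream
+# file. This module defines the DNSKEY flag bits (SEP, REVOKE, ZONE) and the
+# helpers that map between the numeric flags field and mnemonic text sets.
+# Assuming this vendored `dns` package is importable, a key-signing key's
+# flags value of 257 (0x0101 = ZONE | SEP) decodes as follows:
+#
+#   import dns.rdtypes.dnskeybase as dnskeybase
+#   dnskeybase.flags_to_text_set(257)    # a set containing 'SEP' and 'ZONE'
+#   dnskeybase.flags_from_text_set(['SEP', 'ZONE'])  # 257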
+ +import base64 +import struct + +import dns.exception +import dns.dnssec +import dns.rdata + +# wildcard import +__all__ = ["SEP", "REVOKE", "ZONE", + "flags_to_text_set", "flags_from_text_set"] + +# flag constants +SEP = 0x0001 +REVOKE = 0x0080 +ZONE = 0x0100 + +_flag_by_text = { + 'SEP': SEP, + 'REVOKE': REVOKE, + 'ZONE': ZONE +} + +# We construct the inverse mapping programmatically to ensure that we +# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that +# would cause the mapping not to be true inverse. +_flag_by_value = {y: x for x, y in _flag_by_text.items()} + + +def flags_to_text_set(flags): + """Convert a DNSKEY flags value to set texts + @rtype: set([string])""" + + flags_set = set() + mask = 0x1 + while mask <= 0x8000: + if flags & mask: + text = _flag_by_value.get(mask) + if not text: + text = hex(mask) + flags_set.add(text) + mask <<= 1 + return flags_set + + +def flags_from_text_set(texts_set): + """Convert set of DNSKEY flag mnemonic texts to DNSKEY flag value + @rtype: int""" + + flags = 0 + for text in texts_set: + try: + flags += _flag_by_text[text] + except KeyError: + raise NotImplementedError( + "DNSKEY flag '%s' is not supported" % text) + return flags + + +class DNSKEYBase(dns.rdata.Rdata): + + """Base class for rdata that is like a DNSKEY record + + @ivar flags: the key flags + @type flags: int + @ivar protocol: the protocol for which this key may be used + @type protocol: int + @ivar algorithm: the algorithm used for the key + @type algorithm: int + @ivar key: the public key + @type key: string""" + + __slots__ = ['flags', 'protocol', 'algorithm', 'key'] + + def __init__(self, rdclass, rdtype, flags, protocol, algorithm, key): + super(DNSKEYBase, self).__init__(rdclass, rdtype) + self.flags = flags + self.protocol = protocol + self.algorithm = algorithm + self.key = key + + def to_text(self, origin=None, relativize=True, **kw): + return '%d %d %d %s' % (self.flags, self.protocol, self.algorithm, + dns.rdata._base64ify(self.key)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + flags = tok.get_uint16() + protocol = tok.get_uint8() + algorithm = dns.dnssec.algorithm_from_text(tok.get_string()) + chunks = [] + while 1: + t = tok.get().unescape() + if t.is_eol_or_eof(): + break + if not t.is_identifier(): + raise dns.exception.SyntaxError + chunks.append(t.value.encode()) + b64 = b''.join(chunks) + key = base64.b64decode(b64) + return cls(rdclass, rdtype, flags, protocol, algorithm, key) + + def to_wire(self, file, compress=None, origin=None): + header = struct.pack("!HBB", self.flags, self.protocol, self.algorithm) + file.write(header) + file.write(self.key) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + if rdlen < 4: + raise dns.exception.FormError + header = struct.unpack('!HBB', wire[current: current + 4]) + current += 4 + rdlen -= 4 + key = wire[current: current + rdlen].unwrap() + return cls(rdclass, rdtype, header[0], header[1], header[2], + key) + + def flags_to_text_set(self): + """Convert a DNSKEY flags value to set texts + @rtype: set([string])""" + return flags_to_text_set(self.flags) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/dsbase.py b/openpype/vendor/python/python_2/dns/rdtypes/dsbase.py new file mode 100644 index 0000000000..26ae9d5c7d --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/dsbase.py @@ -0,0 +1,85 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2010, 2011 
Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct +import binascii + +import dns.rdata +import dns.rdatatype + + +class DSBase(dns.rdata.Rdata): + + """Base class for rdata that is like a DS record + + @ivar key_tag: the key tag + @type key_tag: int + @ivar algorithm: the algorithm + @type algorithm: int + @ivar digest_type: the digest type + @type digest_type: int + @ivar digest: the digest + @type digest: int + @see: draft-ietf-dnsext-delegation-signer-14.txt""" + + __slots__ = ['key_tag', 'algorithm', 'digest_type', 'digest'] + + def __init__(self, rdclass, rdtype, key_tag, algorithm, digest_type, + digest): + super(DSBase, self).__init__(rdclass, rdtype) + self.key_tag = key_tag + self.algorithm = algorithm + self.digest_type = digest_type + self.digest = digest + + def to_text(self, origin=None, relativize=True, **kw): + return '%d %d %d %s' % (self.key_tag, self.algorithm, + self.digest_type, + dns.rdata._hexify(self.digest, + chunksize=128)) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + key_tag = tok.get_uint16() + algorithm = tok.get_uint8() + digest_type = tok.get_uint8() + chunks = [] + while 1: + t = tok.get().unescape() + if t.is_eol_or_eof(): + break + if not t.is_identifier(): + raise dns.exception.SyntaxError + chunks.append(t.value.encode()) + digest = b''.join(chunks) + digest = binascii.unhexlify(digest) + return cls(rdclass, rdtype, key_tag, algorithm, digest_type, + digest) + + def to_wire(self, file, compress=None, origin=None): + header = struct.pack("!HBB", self.key_tag, self.algorithm, + self.digest_type) + file.write(header) + file.write(self.digest) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + header = struct.unpack("!HBB", wire[current: current + 4]) + current += 4 + rdlen -= 4 + digest = wire[current: current + rdlen].unwrap() + return cls(rdclass, rdtype, header[0], header[1], header[2], digest) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/euibase.py b/openpype/vendor/python/python_2/dns/rdtypes/euibase.py new file mode 100644 index 0000000000..cc5fdaa63b --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/euibase.py @@ -0,0 +1,71 @@ +# Copyright (C) 2015 Red Hat, Inc. +# Author: Petr Spacek +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii + +import dns.rdata +from dns._compat import xrange + + +class EUIBase(dns.rdata.Rdata): + + """EUIxx record + + @ivar fingerprint: xx-bit Extended Unique Identifier (EUI-xx) + @type fingerprint: string + @see: rfc7043.txt""" + + __slots__ = ['eui'] + # define these in subclasses + # byte_len = 6 # 0123456789ab (in hex) + # text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab + + def __init__(self, rdclass, rdtype, eui): + super(EUIBase, self).__init__(rdclass, rdtype) + if len(eui) != self.byte_len: + raise dns.exception.FormError('EUI%s rdata has to have %s bytes' + % (self.byte_len * 8, self.byte_len)) + self.eui = eui + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._hexify(self.eui, chunksize=2).replace(' ', '-') + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + text = tok.get_string() + tok.get_eol() + if len(text) != cls.text_len: + raise dns.exception.SyntaxError( + 'Input text must have %s characters' % cls.text_len) + expected_dash_idxs = xrange(2, cls.byte_len * 3 - 1, 3) + for i in expected_dash_idxs: + if text[i] != '-': + raise dns.exception.SyntaxError('Dash expected at position %s' + % i) + text = text.replace('-', '') + try: + data = binascii.unhexlify(text.encode()) + except (ValueError, TypeError) as ex: + raise dns.exception.SyntaxError('Hex decoding error: %s' % str(ex)) + return cls(rdclass, rdtype, data) + + def to_wire(self, file, compress=None, origin=None): + file.write(self.eui) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + eui = wire[current:current + rdlen].unwrap() + return cls(rdclass, rdtype, eui) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/mxbase.py b/openpype/vendor/python/python_2/dns/rdtypes/mxbase.py new file mode 100644 index 0000000000..9a3fa62360 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/mxbase.py @@ -0,0 +1,103 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""MX-like base classes.""" + +from io import BytesIO +import struct + +import dns.exception +import dns.rdata +import dns.name + + +class MXBase(dns.rdata.Rdata): + + """Base class for rdata that is like an MX record. 
+ + @ivar preference: the preference value + @type preference: int + @ivar exchange: the exchange name + @type exchange: dns.name.Name object""" + + __slots__ = ['preference', 'exchange'] + + def __init__(self, rdclass, rdtype, preference, exchange): + super(MXBase, self).__init__(rdclass, rdtype) + self.preference = preference + self.exchange = exchange + + def to_text(self, origin=None, relativize=True, **kw): + exchange = self.exchange.choose_relativity(origin, relativize) + return '%d %s' % (self.preference, exchange) + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + preference = tok.get_uint16() + exchange = tok.get_name() + exchange = exchange.choose_relativity(origin, relativize) + tok.get_eol() + return cls(rdclass, rdtype, preference, exchange) + + def to_wire(self, file, compress=None, origin=None): + pref = struct.pack("!H", self.preference) + file.write(pref) + self.exchange.to_wire(file, compress, origin) + + def to_digestable(self, origin=None): + return struct.pack("!H", self.preference) + \ + self.exchange.to_digestable(origin) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + (preference, ) = struct.unpack('!H', wire[current: current + 2]) + current += 2 + rdlen -= 2 + (exchange, cused) = dns.name.from_wire(wire[: current + rdlen], + current) + if cused != rdlen: + raise dns.exception.FormError + if origin is not None: + exchange = exchange.relativize(origin) + return cls(rdclass, rdtype, preference, exchange) + + def choose_relativity(self, origin=None, relativize=True): + self.exchange = self.exchange.choose_relativity(origin, relativize) + + +class UncompressedMX(MXBase): + + """Base class for rdata that is like an MX record, but whose name + is not compressed when converted to DNS wire format, and whose + digestable form is not downcased.""" + + def to_wire(self, file, compress=None, origin=None): + super(UncompressedMX, self).to_wire(file, None, origin) + + def to_digestable(self, origin=None): + f = BytesIO() + self.to_wire(f, None, origin) + return f.getvalue() + + +class UncompressedDowncasingMX(MXBase): + + """Base class for rdata that is like an MX record, but whose name + is not compressed when convert to DNS wire format.""" + + def to_wire(self, file, compress=None, origin=None): + super(UncompressedDowncasingMX, self).to_wire(file, None, origin) diff --git a/openpype/vendor/python/python_2/dns/rdtypes/nsbase.py b/openpype/vendor/python/python_2/dns/rdtypes/nsbase.py new file mode 100644 index 0000000000..97a2232638 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rdtypes/nsbase.py @@ -0,0 +1,83 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
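+
+# Editor's note -- an illustrative sketch, not part of the vendored upstream
+# file. NSBase below plays the same role for single-name rdata (NS, CNAME,
+# PTR, ...) that MXBase in the previous hunk plays for preference+name rdata,
+# and UncompressedNS mirrors UncompressedMX. Assuming this vendored `dns`
+# package is importable:
+#
+#   import dns.rdata, dns.rdataclass, dns.rdatatype
+#   rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.NS,
+#                            'ns1.example.com.')
+#   print(rd.to_text())  # ns1.example.com.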
+
+"""NS-like base classes."""
+
+from io import BytesIO
+
+import dns.exception
+import dns.rdata
+import dns.name
+
+
+class NSBase(dns.rdata.Rdata):
+
+    """Base class for rdata that is like an NS record.
+
+    @ivar target: the target name of the rdata
+    @type target: dns.name.Name object"""
+
+    __slots__ = ['target']
+
+    def __init__(self, rdclass, rdtype, target):
+        super(NSBase, self).__init__(rdclass, rdtype)
+        self.target = target
+
+    def to_text(self, origin=None, relativize=True, **kw):
+        target = self.target.choose_relativity(origin, relativize)
+        return str(target)
+
+    @classmethod
+    def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
+        target = tok.get_name()
+        target = target.choose_relativity(origin, relativize)
+        tok.get_eol()
+        return cls(rdclass, rdtype, target)
+
+    def to_wire(self, file, compress=None, origin=None):
+        self.target.to_wire(file, compress, origin)
+
+    def to_digestable(self, origin=None):
+        return self.target.to_digestable(origin)
+
+    @classmethod
+    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
+        (target, cused) = dns.name.from_wire(wire[: current + rdlen],
+                                             current)
+        if cused != rdlen:
+            raise dns.exception.FormError
+        if origin is not None:
+            target = target.relativize(origin)
+        return cls(rdclass, rdtype, target)
+
+    def choose_relativity(self, origin=None, relativize=True):
+        self.target = self.target.choose_relativity(origin, relativize)
+
+
+class UncompressedNS(NSBase):
+
+    """Base class for rdata that is like an NS record, but whose name
+    is not compressed when converted to DNS wire format, and whose
+    digestable form is not downcased."""
+
+    def to_wire(self, file, compress=None, origin=None):
+        super(UncompressedNS, self).to_wire(file, None, origin)
+
+    def to_digestable(self, origin=None):
+        f = BytesIO()
+        self.to_wire(f, None, origin)
+        return f.getvalue()
diff --git a/openpype/vendor/python/python_2/dns/rdtypes/txtbase.py b/openpype/vendor/python/python_2/dns/rdtypes/txtbase.py
new file mode 100644
index 0000000000..645a57ecfc
--- /dev/null
+++ b/openpype/vendor/python/python_2/dns/rdtypes/txtbase.py
@@ -0,0 +1,97 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2006-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
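+#
+# A minimal usage sketch for the NS-like classes in nsbase.py above
+# (added for orientation; not part of the upstream module)::
+#
+#     import dns.rdata
+#     import dns.rdataclass
+#     import dns.rdatatype
+#
+#     rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.NS,
+#                              'ns1.example.com.')
+#     rd.target        # the dns.name.Name 'ns1.example.com.'
+#     rd.to_text()     # 'ns1.example.com.'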
+ +"""TXT-like base class.""" + +import struct + +import dns.exception +import dns.rdata +import dns.tokenizer +from dns._compat import binary_type, string_types + + +class TXTBase(dns.rdata.Rdata): + + """Base class for rdata that is like a TXT record + + @ivar strings: the strings + @type strings: list of binary + @see: RFC 1035""" + + __slots__ = ['strings'] + + def __init__(self, rdclass, rdtype, strings): + super(TXTBase, self).__init__(rdclass, rdtype) + if isinstance(strings, binary_type) or \ + isinstance(strings, string_types): + strings = [strings] + self.strings = [] + for string in strings: + if isinstance(string, string_types): + string = string.encode() + self.strings.append(string) + + def to_text(self, origin=None, relativize=True, **kw): + txt = '' + prefix = '' + for s in self.strings: + txt += '{}"{}"'.format(prefix, dns.rdata._escapify(s)) + prefix = ' ' + return txt + + @classmethod + def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): + strings = [] + while 1: + token = tok.get().unescape() + if token.is_eol_or_eof(): + break + if not (token.is_quoted_string() or token.is_identifier()): + raise dns.exception.SyntaxError("expected a string") + if len(token.value) > 255: + raise dns.exception.SyntaxError("string too long") + value = token.value + if isinstance(value, binary_type): + strings.append(value) + else: + strings.append(value.encode()) + if len(strings) == 0: + raise dns.exception.UnexpectedEnd + return cls(rdclass, rdtype, strings) + + def to_wire(self, file, compress=None, origin=None): + for s in self.strings: + l = len(s) + assert l < 256 + file.write(struct.pack('!B', l)) + file.write(s) + + @classmethod + def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): + strings = [] + while rdlen > 0: + l = wire[current] + current += 1 + rdlen -= 1 + if l > rdlen: + raise dns.exception.FormError + s = wire[current: current + l].unwrap() + current += l + rdlen -= l + strings.append(s) + return cls(rdclass, rdtype, strings) diff --git a/openpype/vendor/python/python_2/dns/renderer.py b/openpype/vendor/python/python_2/dns/renderer.py new file mode 100644 index 0000000000..d7ef8c7f09 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/renderer.py @@ -0,0 +1,291 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Help for building DNS wire format messages""" + +from io import BytesIO +import struct +import random +import time + +import dns.exception +import dns.tsig +from ._compat import long + + +QUESTION = 0 +ANSWER = 1 +AUTHORITY = 2 +ADDITIONAL = 3 + + +class Renderer(object): + """Helper class for building DNS wire-format messages. 
+
+    Most applications can use the higher-level L{dns.message.Message}
+    class and its to_wire() method to generate wire-format messages.
+    This class is for those applications which need finer control
+    over the generation of messages.
+
+    Typical use::
+
+        r = dns.renderer.Renderer(id=1, flags=0x80, max_size=512)
+        r.add_question(qname, qtype, qclass)
+        r.add_rrset(dns.renderer.ANSWER, rrset_1)
+        r.add_rrset(dns.renderer.ANSWER, rrset_2)
+        r.add_rrset(dns.renderer.AUTHORITY, ns_rrset)
+        r.add_edns(0, 0, 4096)
+        r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_1)
+        r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_2)
+        r.write_header()
+        r.add_tsig(keyname, secret, 300, 1, 0, '', request_mac)
+        wire = r.get_wire()
+
+    output, a BytesIO, where rendering is written
+
+    id: the message id
+
+    flags: the message flags
+
+    max_size: the maximum size of the message
+
+    origin: the origin to use when rendering relative names
+
+    compress: the compression table
+
+    section: an int, the section currently being rendered
+
+    counts: list of the number of RRs in each section
+
+    mac: the MAC of the rendered message (if TSIG was used)
+    """
+
+    def __init__(self, id=None, flags=0, max_size=65535, origin=None):
+        """Initialize a new renderer."""
+
+        self.output = BytesIO()
+        if id is None:
+            self.id = random.randint(0, 65535)
+        else:
+            self.id = id
+        self.flags = flags
+        self.max_size = max_size
+        self.origin = origin
+        self.compress = {}
+        self.section = QUESTION
+        self.counts = [0, 0, 0, 0]
+        self.output.write(b'\x00' * 12)
+        self.mac = ''
+
+    def _rollback(self, where):
+        """Truncate the output buffer at offset *where*, and remove any
+        compression table entries that pointed beyond the truncation
+        point.
+        """
+
+        self.output.seek(where)
+        self.output.truncate()
+        keys_to_delete = []
+        for k, v in self.compress.items():
+            if v >= where:
+                keys_to_delete.append(k)
+        for k in keys_to_delete:
+            del self.compress[k]
+
+    def _set_section(self, section):
+        """Set the renderer's current section.
+
+        Sections must be rendered in order: QUESTION, ANSWER, AUTHORITY,
+        ADDITIONAL.  Sections may be empty.
+
+        Raises dns.exception.FormError if an attempt was made to set
+        a section value less than the current section.
+        """
+
+        if self.section != section:
+            if self.section > section:
+                raise dns.exception.FormError
+            self.section = section
+
+    def add_question(self, qname, rdtype, rdclass=dns.rdataclass.IN):
+        """Add a question to the message."""
+
+        self._set_section(QUESTION)
+        before = self.output.tell()
+        qname.to_wire(self.output, self.compress, self.origin)
+        self.output.write(struct.pack("!HH", rdtype, rdclass))
+        after = self.output.tell()
+        if after >= self.max_size:
+            self._rollback(before)
+            raise dns.exception.TooBig
+        self.counts[QUESTION] += 1
+
+    def add_rrset(self, section, rrset, **kw):
+        """Add the rrset to the specified section.
+
+        Any keyword arguments are passed on to the rdataset's to_wire()
+        routine.
+        """
+
+        self._set_section(section)
+        before = self.output.tell()
+        n = rrset.to_wire(self.output, self.compress, self.origin, **kw)
+        after = self.output.tell()
+        if after >= self.max_size:
+            self._rollback(before)
+            raise dns.exception.TooBig
+        self.counts[section] += n
+
+    def add_rdataset(self, section, name, rdataset, **kw):
+        """Add the rdataset to the specified section, using the specified
+        name as the owner name.
+
+        Any keyword arguments are passed on to the rdataset's to_wire()
+        routine.
+ """ + + self._set_section(section) + before = self.output.tell() + n = rdataset.to_wire(name, self.output, self.compress, self.origin, + **kw) + after = self.output.tell() + if after >= self.max_size: + self._rollback(before) + raise dns.exception.TooBig + self.counts[section] += n + + def add_edns(self, edns, ednsflags, payload, options=None): + """Add an EDNS OPT record to the message.""" + + # make sure the EDNS version in ednsflags agrees with edns + ednsflags &= long(0xFF00FFFF) + ednsflags |= (edns << 16) + self._set_section(ADDITIONAL) + before = self.output.tell() + self.output.write(struct.pack('!BHHIH', 0, dns.rdatatype.OPT, payload, + ednsflags, 0)) + if options is not None: + lstart = self.output.tell() + for opt in options: + stuff = struct.pack("!HH", opt.otype, 0) + self.output.write(stuff) + start = self.output.tell() + opt.to_wire(self.output) + end = self.output.tell() + assert end - start < 65536 + self.output.seek(start - 2) + stuff = struct.pack("!H", end - start) + self.output.write(stuff) + self.output.seek(0, 2) + lend = self.output.tell() + assert lend - lstart < 65536 + self.output.seek(lstart - 2) + stuff = struct.pack("!H", lend - lstart) + self.output.write(stuff) + self.output.seek(0, 2) + after = self.output.tell() + if after >= self.max_size: + self._rollback(before) + raise dns.exception.TooBig + self.counts[ADDITIONAL] += 1 + + def add_tsig(self, keyname, secret, fudge, id, tsig_error, other_data, + request_mac, algorithm=dns.tsig.default_algorithm): + """Add a TSIG signature to the message.""" + + s = self.output.getvalue() + (tsig_rdata, self.mac, ctx) = dns.tsig.sign(s, + keyname, + secret, + int(time.time()), + fudge, + id, + tsig_error, + other_data, + request_mac, + algorithm=algorithm) + self._write_tsig(tsig_rdata, keyname) + + def add_multi_tsig(self, ctx, keyname, secret, fudge, id, tsig_error, + other_data, request_mac, + algorithm=dns.tsig.default_algorithm): + """Add a TSIG signature to the message. Unlike add_tsig(), this can be + used for a series of consecutive DNS envelopes, e.g. for a zone + transfer over TCP [RFC2845, 4.4]. + + For the first message in the sequence, give ctx=None. For each + subsequent message, give the ctx that was returned from the + add_multi_tsig() call for the previous message.""" + + s = self.output.getvalue() + (tsig_rdata, self.mac, ctx) = dns.tsig.sign(s, + keyname, + secret, + int(time.time()), + fudge, + id, + tsig_error, + other_data, + request_mac, + ctx=ctx, + first=ctx is None, + multi=True, + algorithm=algorithm) + self._write_tsig(tsig_rdata, keyname) + return ctx + + def _write_tsig(self, tsig_rdata, keyname): + self._set_section(ADDITIONAL) + before = self.output.tell() + + keyname.to_wire(self.output, self.compress, self.origin) + self.output.write(struct.pack('!HHIH', dns.rdatatype.TSIG, + dns.rdataclass.ANY, 0, 0)) + rdata_start = self.output.tell() + self.output.write(tsig_rdata) + + after = self.output.tell() + assert after - rdata_start < 65536 + if after >= self.max_size: + self._rollback(before) + raise dns.exception.TooBig + + self.output.seek(rdata_start - 2) + self.output.write(struct.pack('!H', after - rdata_start)) + self.counts[ADDITIONAL] += 1 + self.output.seek(10) + self.output.write(struct.pack('!H', self.counts[ADDITIONAL])) + self.output.seek(0, 2) + + def write_header(self): + """Write the DNS message header. + + Writing the DNS message header is done after all sections + have been rendered, but before the optional TSIG signature + is added. 
+ """ + + self.output.seek(0) + self.output.write(struct.pack('!HHHHHH', self.id, self.flags, + self.counts[0], self.counts[1], + self.counts[2], self.counts[3])) + self.output.seek(0, 2) + + def get_wire(self): + """Return the wire format message.""" + + return self.output.getvalue() diff --git a/openpype/vendor/python/python_2/dns/resolver.py b/openpype/vendor/python/python_2/dns/resolver.py new file mode 100644 index 0000000000..806e5b2b45 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/resolver.py @@ -0,0 +1,1383 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS stub resolver.""" + +import socket +import sys +import time +import random + +try: + import threading as _threading +except ImportError: + import dummy_threading as _threading + +import dns.exception +import dns.flags +import dns.ipv4 +import dns.ipv6 +import dns.message +import dns.name +import dns.query +import dns.rcode +import dns.rdataclass +import dns.rdatatype +import dns.reversename +import dns.tsig +from ._compat import xrange, string_types + +if sys.platform == 'win32': + try: + import winreg as _winreg + except ImportError: + import _winreg # pylint: disable=import-error + +class NXDOMAIN(dns.exception.DNSException): + """The DNS query name does not exist.""" + supp_kwargs = {'qnames', 'responses'} + fmt = None # we have our own __str__ implementation + + def _check_kwargs(self, qnames, responses=None): + if not isinstance(qnames, (list, tuple, set)): + raise AttributeError("qnames must be a list, tuple or set") + if len(qnames) == 0: + raise AttributeError("qnames must contain at least one element") + if responses is None: + responses = {} + elif not isinstance(responses, dict): + raise AttributeError("responses must be a dict(qname=response)") + kwargs = dict(qnames=qnames, responses=responses) + return kwargs + + def __str__(self): + if 'qnames' not in self.kwargs: + return super(NXDOMAIN, self).__str__() + qnames = self.kwargs['qnames'] + if len(qnames) > 1: + msg = 'None of DNS query names exist' + else: + msg = 'The DNS query name does not exist' + qnames = ', '.join(map(str, qnames)) + return "{}: {}".format(msg, qnames) + + def canonical_name(self): + if not 'qnames' in self.kwargs: + raise TypeError("parametrized exception required") + IN = dns.rdataclass.IN + CNAME = dns.rdatatype.CNAME + cname = None + for qname in self.kwargs['qnames']: + response = self.kwargs['responses'][qname] + for answer in response.answer: + if answer.rdtype != CNAME or answer.rdclass != IN: + continue + cname = answer.items[0].target.to_text() + if cname is not None: + return dns.name.from_text(cname) + return self.kwargs['qnames'][0] + canonical_name = property(canonical_name, doc=( + "Return the unresolved 
canonical name.")) + + def __add__(self, e_nx): + """Augment by results from another NXDOMAIN exception.""" + qnames0 = list(self.kwargs.get('qnames', [])) + responses0 = dict(self.kwargs.get('responses', {})) + responses1 = e_nx.kwargs.get('responses', {}) + for qname1 in e_nx.kwargs.get('qnames', []): + if qname1 not in qnames0: + qnames0.append(qname1) + if qname1 in responses1: + responses0[qname1] = responses1[qname1] + return NXDOMAIN(qnames=qnames0, responses=responses0) + + def qnames(self): + """All of the names that were tried. + + Returns a list of ``dns.name.Name``. + """ + return self.kwargs['qnames'] + + def responses(self): + """A map from queried names to their NXDOMAIN responses. + + Returns a dict mapping a ``dns.name.Name`` to a + ``dns.message.Message``. + """ + return self.kwargs['responses'] + + def response(self, qname): + """The response for query *qname*. + + Returns a ``dns.message.Message``. + """ + return self.kwargs['responses'][qname] + + +class YXDOMAIN(dns.exception.DNSException): + """The DNS query name is too long after DNAME substitution.""" + +# The definition of the Timeout exception has moved from here to the +# dns.exception module. We keep dns.resolver.Timeout defined for +# backwards compatibility. + +Timeout = dns.exception.Timeout + + +class NoAnswer(dns.exception.DNSException): + """The DNS response does not contain an answer to the question.""" + fmt = 'The DNS response does not contain an answer ' + \ + 'to the question: {query}' + supp_kwargs = {'response'} + + def _fmt_kwargs(self, **kwargs): + return super(NoAnswer, self)._fmt_kwargs( + query=kwargs['response'].question) + + +class NoNameservers(dns.exception.DNSException): + """All nameservers failed to answer the query. + + errors: list of servers and respective errors + The type of errors is + [(server IP address, any object convertible to string)]. + Non-empty errors list will add explanatory message () + """ + + msg = "All nameservers failed to answer the query." + fmt = "%s {query}: {errors}" % msg[:-1] + supp_kwargs = {'request', 'errors'} + + def _fmt_kwargs(self, **kwargs): + srv_msgs = [] + for err in kwargs['errors']: + srv_msgs.append('Server {} {} port {} answered {}'.format(err[0], + 'TCP' if err[1] else 'UDP', err[2], err[3])) + return super(NoNameservers, self)._fmt_kwargs( + query=kwargs['request'].question, errors='; '.join(srv_msgs)) + + +class NotAbsolute(dns.exception.DNSException): + """An absolute domain name is required but a relative name was provided.""" + + +class NoRootSOA(dns.exception.DNSException): + """There is no SOA RR at the DNS root name. This should never happen!""" + + +class NoMetaqueries(dns.exception.DNSException): + """DNS metaqueries are not allowed.""" + + +class Answer(object): + """DNS stub resolver answer. + + Instances of this class bundle up the result of a successful DNS + resolution. + + For convenience, the answer object implements much of the sequence + protocol, forwarding to its ``rrset`` attribute. E.g. + ``for a in answer`` is equivalent to ``for a in answer.rrset``. + ``answer[i]`` is equivalent to ``answer.rrset[i]``, and + ``answer[i:j]`` is equivalent to ``answer.rrset[i:j]``. + + Note that CNAMEs or DNAMEs in the response may mean that answer + RRset's name might not be the query name. 
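+
+    A minimal usage sketch (illustrative only)::
+
+        answer = dns.resolver.query('www.example.com', 'A')
+        for rdata in answer:           # same as iterating answer.rrset
+            print(rdata.address)
+        answer.canonical_name          # may differ from the query name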
+ """ + + def __init__(self, qname, rdtype, rdclass, response, + raise_on_no_answer=True): + self.qname = qname + self.rdtype = rdtype + self.rdclass = rdclass + self.response = response + min_ttl = -1 + rrset = None + for count in xrange(0, 15): + try: + rrset = response.find_rrset(response.answer, qname, + rdclass, rdtype) + if min_ttl == -1 or rrset.ttl < min_ttl: + min_ttl = rrset.ttl + break + except KeyError: + if rdtype != dns.rdatatype.CNAME: + try: + crrset = response.find_rrset(response.answer, + qname, + rdclass, + dns.rdatatype.CNAME) + if min_ttl == -1 or crrset.ttl < min_ttl: + min_ttl = crrset.ttl + for rd in crrset: + qname = rd.target + break + continue + except KeyError: + if raise_on_no_answer: + raise NoAnswer(response=response) + if raise_on_no_answer: + raise NoAnswer(response=response) + if rrset is None and raise_on_no_answer: + raise NoAnswer(response=response) + self.canonical_name = qname + self.rrset = rrset + if rrset is None: + while 1: + # Look for a SOA RR whose owner name is a superdomain + # of qname. + try: + srrset = response.find_rrset(response.authority, qname, + rdclass, dns.rdatatype.SOA) + if min_ttl == -1 or srrset.ttl < min_ttl: + min_ttl = srrset.ttl + if srrset[0].minimum < min_ttl: + min_ttl = srrset[0].minimum + break + except KeyError: + try: + qname = qname.parent() + except dns.name.NoParent: + break + self.expiration = time.time() + min_ttl + + def __getattr__(self, attr): + if attr == 'name': + return self.rrset.name + elif attr == 'ttl': + return self.rrset.ttl + elif attr == 'covers': + return self.rrset.covers + elif attr == 'rdclass': + return self.rrset.rdclass + elif attr == 'rdtype': + return self.rrset.rdtype + else: + raise AttributeError(attr) + + def __len__(self): + return self.rrset and len(self.rrset) or 0 + + def __iter__(self): + return self.rrset and iter(self.rrset) or iter(tuple()) + + def __getitem__(self, i): + if self.rrset is None: + raise IndexError + return self.rrset[i] + + def __delitem__(self, i): + if self.rrset is None: + raise IndexError + del self.rrset[i] + + +class Cache(object): + """Simple thread-safe DNS answer cache.""" + + def __init__(self, cleaning_interval=300.0): + """*cleaning_interval*, a ``float`` is the number of seconds between + periodic cleanings. + """ + + self.data = {} + self.cleaning_interval = cleaning_interval + self.next_cleaning = time.time() + self.cleaning_interval + self.lock = _threading.Lock() + + def _maybe_clean(self): + """Clean the cache if it's time to do so.""" + + now = time.time() + if self.next_cleaning <= now: + keys_to_delete = [] + for (k, v) in self.data.items(): + if v.expiration <= now: + keys_to_delete.append(k) + for k in keys_to_delete: + del self.data[k] + now = time.time() + self.next_cleaning = now + self.cleaning_interval + + def get(self, key): + """Get the answer associated with *key*. + + Returns None if no answer is cached for the key. + + *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the + query name, rdtype, and rdclass respectively. + + Returns a ``dns.resolver.Answer`` or ``None``. + """ + + try: + self.lock.acquire() + self._maybe_clean() + v = self.data.get(key) + if v is None or v.expiration <= time.time(): + return None + return v + finally: + self.lock.release() + + def put(self, key, value): + """Associate key and value in the cache. + + *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the + query name, rdtype, and rdclass respectively. + + *value*, a ``dns.resolver.Answer``, the answer. 
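+
+        A minimal usage sketch (illustrative only)::
+
+            cache = Cache()
+            key = (dns.name.from_text('example.com.'),
+                   dns.rdatatype.A, dns.rdataclass.IN)
+            cache.put(key, answer)   # answer: a dns.resolver.Answer
+            cache.get(key)           # returns it until the TTL expires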
+ """ + + try: + self.lock.acquire() + self._maybe_clean() + self.data[key] = value + finally: + self.lock.release() + + def flush(self, key=None): + """Flush the cache. + + If *key* is not ``None``, only that item is flushed. Otherwise + the entire cache is flushed. + + *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the + query name, rdtype, and rdclass respectively. + """ + + try: + self.lock.acquire() + if key is not None: + if key in self.data: + del self.data[key] + else: + self.data = {} + self.next_cleaning = time.time() + self.cleaning_interval + finally: + self.lock.release() + + +class LRUCacheNode(object): + """LRUCache node.""" + + def __init__(self, key, value): + self.key = key + self.value = value + self.prev = self + self.next = self + + def link_before(self, node): + self.prev = node.prev + self.next = node + node.prev.next = self + node.prev = self + + def link_after(self, node): + self.prev = node + self.next = node.next + node.next.prev = self + node.next = self + + def unlink(self): + self.next.prev = self.prev + self.prev.next = self.next + + +class LRUCache(object): + """Thread-safe, bounded, least-recently-used DNS answer cache. + + This cache is better than the simple cache (above) if you're + running a web crawler or other process that does a lot of + resolutions. The LRUCache has a maximum number of nodes, and when + it is full, the least-recently used node is removed to make space + for a new one. + """ + + def __init__(self, max_size=100000): + """*max_size*, an ``int``, is the maximum number of nodes to cache; + it must be greater than 0. + """ + + self.data = {} + self.set_max_size(max_size) + self.sentinel = LRUCacheNode(None, None) + self.lock = _threading.Lock() + + def set_max_size(self, max_size): + if max_size < 1: + max_size = 1 + self.max_size = max_size + + def get(self, key): + """Get the answer associated with *key*. + + Returns None if no answer is cached for the key. + + *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the + query name, rdtype, and rdclass respectively. + + Returns a ``dns.resolver.Answer`` or ``None``. + """ + + try: + self.lock.acquire() + node = self.data.get(key) + if node is None: + return None + # Unlink because we're either going to move the node to the front + # of the LRU list or we're going to free it. + node.unlink() + if node.value.expiration <= time.time(): + del self.data[node.key] + return None + node.link_after(self.sentinel) + return node.value + finally: + self.lock.release() + + def put(self, key, value): + """Associate key and value in the cache. + + *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the + query name, rdtype, and rdclass respectively. + + *value*, a ``dns.resolver.Answer``, the answer. + """ + + try: + self.lock.acquire() + node = self.data.get(key) + if node is not None: + node.unlink() + del self.data[node.key] + while len(self.data) >= self.max_size: + node = self.sentinel.prev + node.unlink() + del self.data[node.key] + node = LRUCacheNode(key, value) + node.link_after(self.sentinel) + self.data[key] = node + finally: + self.lock.release() + + def flush(self, key=None): + """Flush the cache. + + If *key* is not ``None``, only that item is flushed. Otherwise + the entire cache is flushed. + + *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the + query name, rdtype, and rdclass respectively. 
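+
+        A minimal usage sketch (illustrative only)::
+
+            cache = LRUCache(max_size=10000)
+            cache.put(key, answer)   # key: (name, rdtype, rdclass) tuple
+            cache.flush(key)         # evict a single entry, if cached
+            cache.flush()            # empty the whole cache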
+ """ + + try: + self.lock.acquire() + if key is not None: + node = self.data.get(key) + if node is not None: + node.unlink() + del self.data[node.key] + else: + node = self.sentinel.next + while node != self.sentinel: + next = node.next + node.prev = None + node.next = None + node = next + self.data = {} + finally: + self.lock.release() + + +class Resolver(object): + """DNS stub resolver.""" + + def __init__(self, filename='/etc/resolv.conf', configure=True): + """*filename*, a ``text`` or file object, specifying a file + in standard /etc/resolv.conf format. This parameter is meaningful + only when *configure* is true and the platform is POSIX. + + *configure*, a ``bool``. If True (the default), the resolver + instance is configured in the normal fashion for the operating + system the resolver is running on. (I.e. by reading a + /etc/resolv.conf file on POSIX systems and from the registry + on Windows systems.) + """ + + self.domain = None + self.nameservers = None + self.nameserver_ports = None + self.port = None + self.search = None + self.timeout = None + self.lifetime = None + self.keyring = None + self.keyname = None + self.keyalgorithm = None + self.edns = None + self.ednsflags = None + self.payload = None + self.cache = None + self.flags = None + self.retry_servfail = False + self.rotate = False + + self.reset() + if configure: + if sys.platform == 'win32': + self.read_registry() + elif filename: + self.read_resolv_conf(filename) + + def reset(self): + """Reset all resolver configuration to the defaults.""" + + self.domain = \ + dns.name.Name(dns.name.from_text(socket.gethostname())[1:]) + if len(self.domain) == 0: + self.domain = dns.name.root + self.nameservers = [] + self.nameserver_ports = {} + self.port = 53 + self.search = [] + self.timeout = 2.0 + self.lifetime = 30.0 + self.keyring = None + self.keyname = None + self.keyalgorithm = dns.tsig.default_algorithm + self.edns = -1 + self.ednsflags = 0 + self.payload = 0 + self.cache = None + self.flags = None + self.retry_servfail = False + self.rotate = False + + def read_resolv_conf(self, f): + """Process *f* as a file in the /etc/resolv.conf format. If f is + a ``text``, it is used as the name of the file to open; otherwise it + is treated as the file itself.""" + + if isinstance(f, string_types): + try: + f = open(f, 'r') + except IOError: + # /etc/resolv.conf doesn't exist, can't be read, etc. + # We'll just use the default resolver configuration. + self.nameservers = ['127.0.0.1'] + return + want_close = True + else: + want_close = False + try: + for l in f: + if len(l) == 0 or l[0] == '#' or l[0] == ';': + continue + tokens = l.split() + + # Any line containing less than 2 tokens is malformed + if len(tokens) < 2: + continue + + if tokens[0] == 'nameserver': + self.nameservers.append(tokens[1]) + elif tokens[0] == 'domain': + self.domain = dns.name.from_text(tokens[1]) + elif tokens[0] == 'search': + for suffix in tokens[1:]: + self.search.append(dns.name.from_text(suffix)) + elif tokens[0] == 'options': + if 'rotate' in tokens[1:]: + self.rotate = True + finally: + if want_close: + f.close() + if len(self.nameservers) == 0: + self.nameservers.append('127.0.0.1') + + def _determine_split_char(self, entry): + # + # The windows registry irritatingly changes the list element + # delimiter in between ' ' and ',' (and vice-versa) in various + # versions of windows. + # + if entry.find(' ') >= 0: + split_char = ' ' + elif entry.find(',') >= 0: + split_char = ',' + else: + # probably a singleton; treat as a space-separated list. 
+ split_char = ' ' + return split_char + + def _config_win32_nameservers(self, nameservers): + # we call str() on nameservers to convert it from unicode to ascii + nameservers = str(nameservers) + split_char = self._determine_split_char(nameservers) + ns_list = nameservers.split(split_char) + for ns in ns_list: + if ns not in self.nameservers: + self.nameservers.append(ns) + + def _config_win32_domain(self, domain): + # we call str() on domain to convert it from unicode to ascii + self.domain = dns.name.from_text(str(domain)) + + def _config_win32_search(self, search): + # we call str() on search to convert it from unicode to ascii + search = str(search) + split_char = self._determine_split_char(search) + search_list = search.split(split_char) + for s in search_list: + if s not in self.search: + self.search.append(dns.name.from_text(s)) + + def _config_win32_fromkey(self, key, always_try_domain): + try: + servers, rtype = _winreg.QueryValueEx(key, 'NameServer') + except WindowsError: # pylint: disable=undefined-variable + servers = None + if servers: + self._config_win32_nameservers(servers) + if servers or always_try_domain: + try: + dom, rtype = _winreg.QueryValueEx(key, 'Domain') + if dom: + self._config_win32_domain(dom) + except WindowsError: # pylint: disable=undefined-variable + pass + else: + try: + servers, rtype = _winreg.QueryValueEx(key, 'DhcpNameServer') + except WindowsError: # pylint: disable=undefined-variable + servers = None + if servers: + self._config_win32_nameservers(servers) + try: + dom, rtype = _winreg.QueryValueEx(key, 'DhcpDomain') + if dom: + self._config_win32_domain(dom) + except WindowsError: # pylint: disable=undefined-variable + pass + try: + search, rtype = _winreg.QueryValueEx(key, 'SearchList') + except WindowsError: # pylint: disable=undefined-variable + search = None + if search: + self._config_win32_search(search) + + def read_registry(self): + """Extract resolver configuration from the Windows registry.""" + + lm = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE) + want_scan = False + try: + try: + # XP, 2000 + tcp_params = _winreg.OpenKey(lm, + r'SYSTEM\CurrentControlSet' + r'\Services\Tcpip\Parameters') + want_scan = True + except EnvironmentError: + # ME + tcp_params = _winreg.OpenKey(lm, + r'SYSTEM\CurrentControlSet' + r'\Services\VxD\MSTCP') + try: + self._config_win32_fromkey(tcp_params, True) + finally: + tcp_params.Close() + if want_scan: + interfaces = _winreg.OpenKey(lm, + r'SYSTEM\CurrentControlSet' + r'\Services\Tcpip\Parameters' + r'\Interfaces') + try: + i = 0 + while True: + try: + guid = _winreg.EnumKey(interfaces, i) + i += 1 + key = _winreg.OpenKey(interfaces, guid) + if not self._win32_is_nic_enabled(lm, guid, key): + continue + try: + self._config_win32_fromkey(key, False) + finally: + key.Close() + except EnvironmentError: + break + finally: + interfaces.Close() + finally: + lm.Close() + + def _win32_is_nic_enabled(self, lm, guid, interface_key): + # Look in the Windows Registry to determine whether the network + # interface corresponding to the given guid is enabled. + # + # (Code contributed by Paul Marks, thanks!) + # + try: + # This hard-coded location seems to be consistent, at least + # from Windows 2000 through Vista. 
+ connection_key = _winreg.OpenKey( + lm, + r'SYSTEM\CurrentControlSet\Control\Network' + r'\{4D36E972-E325-11CE-BFC1-08002BE10318}' + r'\%s\Connection' % guid) + + try: + # The PnpInstanceID points to a key inside Enum + (pnp_id, ttype) = _winreg.QueryValueEx( + connection_key, 'PnpInstanceID') + + if ttype != _winreg.REG_SZ: + raise ValueError + + device_key = _winreg.OpenKey( + lm, r'SYSTEM\CurrentControlSet\Enum\%s' % pnp_id) + + try: + # Get ConfigFlags for this device + (flags, ttype) = _winreg.QueryValueEx( + device_key, 'ConfigFlags') + + if ttype != _winreg.REG_DWORD: + raise ValueError + + # Based on experimentation, bit 0x1 indicates that the + # device is disabled. + return not flags & 0x1 + + finally: + device_key.Close() + finally: + connection_key.Close() + except (EnvironmentError, ValueError): + # Pre-vista, enabled interfaces seem to have a non-empty + # NTEContextList; this was how dnspython detected enabled + # nics before the code above was contributed. We've retained + # the old method since we don't know if the code above works + # on Windows 95/98/ME. + try: + (nte, ttype) = _winreg.QueryValueEx(interface_key, + 'NTEContextList') + return nte is not None + except WindowsError: # pylint: disable=undefined-variable + return False + + def _compute_timeout(self, start, lifetime=None): + lifetime = self.lifetime if lifetime is None else lifetime + now = time.time() + duration = now - start + if duration < 0: + if duration < -1: + # Time going backwards is bad. Just give up. + raise Timeout(timeout=duration) + else: + # Time went backwards, but only a little. This can + # happen, e.g. under vmware with older linux kernels. + # Pretend it didn't happen. + now = start + if duration >= lifetime: + raise Timeout(timeout=duration) + return min(lifetime - duration, self.timeout) + + def query(self, qname, rdtype=dns.rdatatype.A, rdclass=dns.rdataclass.IN, + tcp=False, source=None, raise_on_no_answer=True, source_port=0, + lifetime=None): + """Query nameservers to find the answer to the question. + + The *qname*, *rdtype*, and *rdclass* parameters may be objects + of the appropriate type, or strings that can be converted into objects + of the appropriate type. + + *qname*, a ``dns.name.Name`` or ``text``, the query name. + + *rdtype*, an ``int`` or ``text``, the query type. + + *rdclass*, an ``int`` or ``text``, the query class. + + *tcp*, a ``bool``. If ``True``, use TCP to make the query. + + *source*, a ``text`` or ``None``. If not ``None``, bind to this IP + address when making queries. + + *raise_on_no_answer*, a ``bool``. If ``True``, raise + ``dns.resolver.NoAnswer`` if there's no answer to the question. + + *source_port*, an ``int``, the port from which to send the message. + + *lifetime*, a ``float``, how long query should run before timing out. + + Raises ``dns.exception.Timeout`` if no answers could be found + in the specified lifetime. + + Raises ``dns.resolver.NXDOMAIN`` if the query name does not exist. + + Raises ``dns.resolver.YXDOMAIN`` if the query name is too long after + DNAME substitution. + + Raises ``dns.resolver.NoAnswer`` if *raise_on_no_answer* is + ``True`` and the query name exists but has no RRset of the + desired type and class. + + Raises ``dns.resolver.NoNameservers`` if no non-broken + nameservers are available to answer the question. + + Returns a ``dns.resolver.Answer`` instance. 
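+
+        A minimal usage sketch (illustrative only)::
+
+            resolver = dns.resolver.Resolver()
+            answer = resolver.query('example.com', 'MX')
+            for rdata in answer:
+                print(rdata.preference, rdata.exchange)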
+ """ + + if isinstance(qname, string_types): + qname = dns.name.from_text(qname, None) + if isinstance(rdtype, string_types): + rdtype = dns.rdatatype.from_text(rdtype) + if dns.rdatatype.is_metatype(rdtype): + raise NoMetaqueries + if isinstance(rdclass, string_types): + rdclass = dns.rdataclass.from_text(rdclass) + if dns.rdataclass.is_metaclass(rdclass): + raise NoMetaqueries + qnames_to_try = [] + if qname.is_absolute(): + qnames_to_try.append(qname) + else: + if len(qname) > 1: + qnames_to_try.append(qname.concatenate(dns.name.root)) + if self.search: + for suffix in self.search: + qnames_to_try.append(qname.concatenate(suffix)) + else: + qnames_to_try.append(qname.concatenate(self.domain)) + all_nxdomain = True + nxdomain_responses = {} + start = time.time() + _qname = None # make pylint happy + for _qname in qnames_to_try: + if self.cache: + answer = self.cache.get((_qname, rdtype, rdclass)) + if answer is not None: + if answer.rrset is None and raise_on_no_answer: + raise NoAnswer(response=answer.response) + else: + return answer + request = dns.message.make_query(_qname, rdtype, rdclass) + if self.keyname is not None: + request.use_tsig(self.keyring, self.keyname, + algorithm=self.keyalgorithm) + request.use_edns(self.edns, self.ednsflags, self.payload) + if self.flags is not None: + request.flags = self.flags + response = None + # + # make a copy of the servers list so we can alter it later. + # + nameservers = self.nameservers[:] + errors = [] + if self.rotate: + random.shuffle(nameservers) + backoff = 0.10 + while response is None: + if len(nameservers) == 0: + raise NoNameservers(request=request, errors=errors) + for nameserver in nameservers[:]: + timeout = self._compute_timeout(start, lifetime) + port = self.nameserver_ports.get(nameserver, self.port) + try: + tcp_attempt = tcp + if tcp: + response = dns.query.tcp(request, nameserver, + timeout, port, + source=source, + source_port=source_port) + else: + response = dns.query.udp(request, nameserver, + timeout, port, + source=source, + source_port=source_port) + if response.flags & dns.flags.TC: + # Response truncated; retry with TCP. + tcp_attempt = True + timeout = self._compute_timeout(start, lifetime) + response = \ + dns.query.tcp(request, nameserver, + timeout, port, + source=source, + source_port=source_port) + except (socket.error, dns.exception.Timeout) as ex: + # + # Communication failure or timeout. Go to the + # next server + # + errors.append((nameserver, tcp_attempt, port, ex, + response)) + response = None + continue + except dns.query.UnexpectedSource as ex: + # + # Who knows? Keep going. + # + errors.append((nameserver, tcp_attempt, port, ex, + response)) + response = None + continue + except dns.exception.FormError as ex: + # + # We don't understand what this server is + # saying. Take it out of the mix and + # continue. + # + nameservers.remove(nameserver) + errors.append((nameserver, tcp_attempt, port, ex, + response)) + response = None + continue + except EOFError as ex: + # + # We're using TCP and they hung up on us. + # Probably they don't support TCP (though + # they're supposed to!). Take it out of the + # mix and continue. 
+ # + nameservers.remove(nameserver) + errors.append((nameserver, tcp_attempt, port, ex, + response)) + response = None + continue + rcode = response.rcode() + if rcode == dns.rcode.YXDOMAIN: + ex = YXDOMAIN() + errors.append((nameserver, tcp_attempt, port, ex, + response)) + raise ex + if rcode == dns.rcode.NOERROR or \ + rcode == dns.rcode.NXDOMAIN: + break + # + # We got a response, but we're not happy with the + # rcode in it. Remove the server from the mix if + # the rcode isn't SERVFAIL. + # + if rcode != dns.rcode.SERVFAIL or not self.retry_servfail: + nameservers.remove(nameserver) + errors.append((nameserver, tcp_attempt, port, + dns.rcode.to_text(rcode), response)) + response = None + if response is not None: + break + # + # All nameservers failed! + # + if len(nameservers) > 0: + # + # But we still have servers to try. Sleep a bit + # so we don't pound them! + # + timeout = self._compute_timeout(start, lifetime) + sleep_time = min(timeout, backoff) + backoff *= 2 + time.sleep(sleep_time) + if response.rcode() == dns.rcode.NXDOMAIN: + nxdomain_responses[_qname] = response + continue + all_nxdomain = False + break + if all_nxdomain: + raise NXDOMAIN(qnames=qnames_to_try, responses=nxdomain_responses) + answer = Answer(_qname, rdtype, rdclass, response, + raise_on_no_answer) + if self.cache: + self.cache.put((_qname, rdtype, rdclass), answer) + return answer + + def use_tsig(self, keyring, keyname=None, + algorithm=dns.tsig.default_algorithm): + """Add a TSIG signature to the query. + + See the documentation of the Message class for a complete + description of the keyring dictionary. + + *keyring*, a ``dict``, the TSIG keyring to use. If a + *keyring* is specified but a *keyname* is not, then the key + used will be the first key in the *keyring*. Note that the + order of keys in a dictionary is not defined, so applications + should supply a keyname when a keyring is used, unless they + know the keyring contains only one key. + + *keyname*, a ``dns.name.Name`` or ``None``, the name of the TSIG key + to use; defaults to ``None``. The key must be defined in the keyring. + + *algorithm*, a ``dns.name.Name``, the TSIG algorithm to use. + """ + + self.keyring = keyring + if keyname is None: + self.keyname = list(self.keyring.keys())[0] + else: + self.keyname = keyname + self.keyalgorithm = algorithm + + def use_edns(self, edns, ednsflags, payload): + """Configure EDNS behavior. + + *edns*, an ``int``, is the EDNS level to use. Specifying + ``None``, ``False``, or ``-1`` means "do not use EDNS", and in this case + the other parameters are ignored. Specifying ``True`` is + equivalent to specifying 0, i.e. "use EDNS0". + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the + maximum size of UDP datagram the sender can handle. I.e. how big + a response to this message can be. + """ + + if edns is None: + edns = -1 + self.edns = edns + self.ednsflags = ednsflags + self.payload = payload + + def set_flags(self, flags): + """Overrides the default flags with your own. + + *flags*, an ``int``, the message flags to use. + """ + + self.flags = flags + + +#: The default resolver. +default_resolver = None + + +def get_default_resolver(): + """Get the default resolver, initializing it if necessary.""" + if default_resolver is None: + reset_default_resolver() + return default_resolver + + +def reset_default_resolver(): + """Re-initialize default resolver. + + Note that the resolver configuration (i.e. 
/etc/resolv.conf on UNIX + systems) will be re-read immediately. + """ + + global default_resolver + default_resolver = Resolver() + + +def query(qname, rdtype=dns.rdatatype.A, rdclass=dns.rdataclass.IN, + tcp=False, source=None, raise_on_no_answer=True, + source_port=0, lifetime=None): + """Query nameservers to find the answer to the question. + + This is a convenience function that uses the default resolver + object to make the query. + + See ``dns.resolver.Resolver.query`` for more information on the + parameters. + """ + + return get_default_resolver().query(qname, rdtype, rdclass, tcp, source, + raise_on_no_answer, source_port, + lifetime) + + +def zone_for_name(name, rdclass=dns.rdataclass.IN, tcp=False, resolver=None): + """Find the name of the zone which contains the specified name. + + *name*, an absolute ``dns.name.Name`` or ``text``, the query name. + + *rdclass*, an ``int``, the query class. + + *tcp*, a ``bool``. If ``True``, use TCP to make the query. + + *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. + If ``None``, the default resolver is used. + + Raises ``dns.resolver.NoRootSOA`` if there is no SOA RR at the DNS + root. (This is only likely to happen if you're using non-default + root servers in your network and they are misconfigured.) + + Returns a ``dns.name.Name``. + """ + + if isinstance(name, string_types): + name = dns.name.from_text(name, dns.name.root) + if resolver is None: + resolver = get_default_resolver() + if not name.is_absolute(): + raise NotAbsolute(name) + while 1: + try: + answer = resolver.query(name, dns.rdatatype.SOA, rdclass, tcp) + if answer.rrset.name == name: + return name + # otherwise we were CNAMEd or DNAMEd and need to look higher + except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): + pass + try: + name = name.parent() + except dns.name.NoParent: + raise NoRootSOA + +# +# Support for overriding the system resolver for all python code in the +# running process. +# + +_protocols_for_socktype = { + socket.SOCK_DGRAM: [socket.SOL_UDP], + socket.SOCK_STREAM: [socket.SOL_TCP], +} + +_resolver = None +_original_getaddrinfo = socket.getaddrinfo +_original_getnameinfo = socket.getnameinfo +_original_getfqdn = socket.getfqdn +_original_gethostbyname = socket.gethostbyname +_original_gethostbyname_ex = socket.gethostbyname_ex +_original_gethostbyaddr = socket.gethostbyaddr + + +def _getaddrinfo(host=None, service=None, family=socket.AF_UNSPEC, socktype=0, + proto=0, flags=0): + if flags & (socket.AI_ADDRCONFIG | socket.AI_V4MAPPED) != 0: + raise NotImplementedError + if host is None and service is None: + raise socket.gaierror(socket.EAI_NONAME) + v6addrs = [] + v4addrs = [] + canonical_name = None + try: + # Is host None or a V6 address literal? + if host is None: + canonical_name = 'localhost' + if flags & socket.AI_PASSIVE != 0: + v6addrs.append('::') + v4addrs.append('0.0.0.0') + else: + v6addrs.append('::1') + v4addrs.append('127.0.0.1') + else: + parts = host.split('%') + if len(parts) == 2: + ahost = parts[0] + else: + ahost = host + addr = dns.ipv6.inet_aton(ahost) + v6addrs.append(host) + canonical_name = host + except Exception: + try: + # Is it a V4 address literal? 
+ addr = dns.ipv4.inet_aton(host) + v4addrs.append(host) + canonical_name = host + except Exception: + if flags & socket.AI_NUMERICHOST == 0: + try: + if family == socket.AF_INET6 or family == socket.AF_UNSPEC: + v6 = _resolver.query(host, dns.rdatatype.AAAA, + raise_on_no_answer=False) + # Note that setting host ensures we query the same name + # for A as we did for AAAA. + host = v6.qname + canonical_name = v6.canonical_name.to_text(True) + if v6.rrset is not None: + for rdata in v6.rrset: + v6addrs.append(rdata.address) + if family == socket.AF_INET or family == socket.AF_UNSPEC: + v4 = _resolver.query(host, dns.rdatatype.A, + raise_on_no_answer=False) + host = v4.qname + canonical_name = v4.canonical_name.to_text(True) + if v4.rrset is not None: + for rdata in v4.rrset: + v4addrs.append(rdata.address) + except dns.resolver.NXDOMAIN: + raise socket.gaierror(socket.EAI_NONAME) + except Exception: + raise socket.gaierror(socket.EAI_SYSTEM) + port = None + try: + # Is it a port literal? + if service is None: + port = 0 + else: + port = int(service) + except Exception: + if flags & socket.AI_NUMERICSERV == 0: + try: + port = socket.getservbyname(service) + except Exception: + pass + if port is None: + raise socket.gaierror(socket.EAI_NONAME) + tuples = [] + if socktype == 0: + socktypes = [socket.SOCK_DGRAM, socket.SOCK_STREAM] + else: + socktypes = [socktype] + if flags & socket.AI_CANONNAME != 0: + cname = canonical_name + else: + cname = '' + if family == socket.AF_INET6 or family == socket.AF_UNSPEC: + for addr in v6addrs: + for socktype in socktypes: + for proto in _protocols_for_socktype[socktype]: + tuples.append((socket.AF_INET6, socktype, proto, + cname, (addr, port, 0, 0))) + if family == socket.AF_INET or family == socket.AF_UNSPEC: + for addr in v4addrs: + for socktype in socktypes: + for proto in _protocols_for_socktype[socktype]: + tuples.append((socket.AF_INET, socktype, proto, + cname, (addr, port))) + if len(tuples) == 0: + raise socket.gaierror(socket.EAI_NONAME) + return tuples + + +def _getnameinfo(sockaddr, flags=0): + host = sockaddr[0] + port = sockaddr[1] + if len(sockaddr) == 4: + scope = sockaddr[3] + family = socket.AF_INET6 + else: + scope = None + family = socket.AF_INET + tuples = _getaddrinfo(host, port, family, socket.SOCK_STREAM, + socket.SOL_TCP, 0) + if len(tuples) > 1: + raise socket.error('sockaddr resolved to multiple addresses') + addr = tuples[0][4][0] + if flags & socket.NI_DGRAM: + pname = 'udp' + else: + pname = 'tcp' + qname = dns.reversename.from_address(addr) + if flags & socket.NI_NUMERICHOST == 0: + try: + answer = _resolver.query(qname, 'PTR') + hostname = answer.rrset[0].target.to_text(True) + except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): + if flags & socket.NI_NAMEREQD: + raise socket.gaierror(socket.EAI_NONAME) + hostname = addr + if scope is not None: + hostname += '%' + str(scope) + else: + hostname = addr + if scope is not None: + hostname += '%' + str(scope) + if flags & socket.NI_NUMERICSERV: + service = str(port) + else: + service = socket.getservbyport(port, pname) + return (hostname, service) + + +def _getfqdn(name=None): + if name is None: + name = socket.gethostname() + try: + return _getnameinfo(_getaddrinfo(name, 80)[0][4])[0] + except Exception: + return name + + +def _gethostbyname(name): + return _gethostbyname_ex(name)[2][0] + + +def _gethostbyname_ex(name): + aliases = [] + addresses = [] + tuples = _getaddrinfo(name, 0, socket.AF_INET, socket.SOCK_STREAM, + socket.SOL_TCP, socket.AI_CANONNAME) + canonical = 
tuples[0][3] + for item in tuples: + addresses.append(item[4][0]) + # XXX we just ignore aliases + return (canonical, aliases, addresses) + + +def _gethostbyaddr(ip): + try: + dns.ipv6.inet_aton(ip) + sockaddr = (ip, 80, 0, 0) + family = socket.AF_INET6 + except Exception: + sockaddr = (ip, 80) + family = socket.AF_INET + (name, port) = _getnameinfo(sockaddr, socket.NI_NAMEREQD) + aliases = [] + addresses = [] + tuples = _getaddrinfo(name, 0, family, socket.SOCK_STREAM, socket.SOL_TCP, + socket.AI_CANONNAME) + canonical = tuples[0][3] + for item in tuples: + addresses.append(item[4][0]) + # XXX we just ignore aliases + return (canonical, aliases, addresses) + + +def override_system_resolver(resolver=None): + """Override the system resolver routines in the socket module with + versions which use dnspython's resolver. + + This can be useful in testing situations where you want to control + the resolution behavior of python code without having to change + the system's resolver settings (e.g. /etc/resolv.conf). + + The resolver to use may be specified; if it's not, the default + resolver will be used. + + resolver, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. + """ + + if resolver is None: + resolver = get_default_resolver() + global _resolver + _resolver = resolver + socket.getaddrinfo = _getaddrinfo + socket.getnameinfo = _getnameinfo + socket.getfqdn = _getfqdn + socket.gethostbyname = _gethostbyname + socket.gethostbyname_ex = _gethostbyname_ex + socket.gethostbyaddr = _gethostbyaddr + + +def restore_system_resolver(): + """Undo the effects of prior override_system_resolver().""" + + global _resolver + _resolver = None + socket.getaddrinfo = _original_getaddrinfo + socket.getnameinfo = _original_getnameinfo + socket.getfqdn = _original_getfqdn + socket.gethostbyname = _original_gethostbyname + socket.gethostbyname_ex = _original_gethostbyname_ex + socket.gethostbyaddr = _original_gethostbyaddr diff --git a/openpype/vendor/python/python_2/dns/reversename.py b/openpype/vendor/python/python_2/dns/reversename.py new file mode 100644 index 0000000000..8f095fa91e --- /dev/null +++ b/openpype/vendor/python/python_2/dns/reversename.py @@ -0,0 +1,96 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Reverse Map Names.""" + +import binascii + +import dns.name +import dns.ipv6 +import dns.ipv4 + +from dns._compat import PY3 + +ipv4_reverse_domain = dns.name.from_text('in-addr.arpa.') +ipv6_reverse_domain = dns.name.from_text('ip6.arpa.') + + +def from_address(text): + """Convert an IPv4 or IPv6 address in textual form into a Name object whose + value is the reverse-map domain name of the address. + + *text*, a ``text``, is an IPv4 or IPv6 address in textual form + (e.g. 
'127.0.0.1', '::1') + + Raises ``dns.exception.SyntaxError`` if the address is badly formed. + + Returns a ``dns.name.Name``. + """ + + try: + v6 = dns.ipv6.inet_aton(text) + if dns.ipv6.is_mapped(v6): + if PY3: + parts = ['%d' % byte for byte in v6[12:]] + else: + parts = ['%d' % ord(byte) for byte in v6[12:]] + origin = ipv4_reverse_domain + else: + parts = [x for x in str(binascii.hexlify(v6).decode())] + origin = ipv6_reverse_domain + except Exception: + parts = ['%d' % + byte for byte in bytearray(dns.ipv4.inet_aton(text))] + origin = ipv4_reverse_domain + parts.reverse() + return dns.name.from_text('.'.join(parts), origin=origin) + + +def to_address(name): + """Convert a reverse map domain name into textual address form. + + *name*, a ``dns.name.Name``, an IPv4 or IPv6 address in reverse-map name + form. + + Raises ``dns.exception.SyntaxError`` if the name does not have a + reverse-map form. + + Returns a ``text``. + """ + + if name.is_subdomain(ipv4_reverse_domain): + name = name.relativize(ipv4_reverse_domain) + labels = list(name.labels) + labels.reverse() + text = b'.'.join(labels) + # run through inet_aton() to check syntax and make pretty. + return dns.ipv4.inet_ntoa(dns.ipv4.inet_aton(text)) + elif name.is_subdomain(ipv6_reverse_domain): + name = name.relativize(ipv6_reverse_domain) + labels = list(name.labels) + labels.reverse() + parts = [] + i = 0 + l = len(labels) + while i < l: + parts.append(b''.join(labels[i:i + 4])) + i += 4 + text = b':'.join(parts) + # run through inet_aton() to check syntax and make pretty. + return dns.ipv6.inet_ntoa(dns.ipv6.inet_aton(text)) + else: + raise dns.exception.SyntaxError('unknown reverse-map address family') diff --git a/openpype/vendor/python/python_2/dns/rrset.py b/openpype/vendor/python/python_2/dns/rrset.py new file mode 100644 index 0000000000..a53ec324b8 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/rrset.py @@ -0,0 +1,189 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS RRsets (an RRset is a named rdataset)""" + + +import dns.name +import dns.rdataset +import dns.rdataclass +import dns.renderer +from ._compat import string_types + + +class RRset(dns.rdataset.Rdataset): + + """A DNS RRset (named rdataset). + + RRset inherits from Rdataset, and RRsets can be treated as + Rdatasets in most cases. There are, however, a few notable + exceptions. RRsets have different to_wire() and to_text() method + arguments, reflecting the fact that RRsets always have an owner + name. 
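+
+    A minimal usage sketch (illustrative only)::
+
+        rrset = dns.rrset.from_text('www.example.com.', 300, 'IN', 'A',
+                                    '10.0.0.1', '10.0.0.2')
+        rrset.name     # the dns.name.Name 'www.example.com.'
+        len(rrset)     # 2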
+    """
+
+    __slots__ = ['name', 'deleting']
+
+    def __init__(self, name, rdclass, rdtype, covers=dns.rdatatype.NONE,
+                 deleting=None):
+        """Create a new RRset."""
+
+        super(RRset, self).__init__(rdclass, rdtype, covers)
+        self.name = name
+        self.deleting = deleting
+
+    def _clone(self):
+        obj = super(RRset, self)._clone()
+        obj.name = self.name
+        obj.deleting = self.deleting
+        return obj
+
+    def __repr__(self):
+        if self.covers == 0:
+            ctext = ''
+        else:
+            ctext = '(' + dns.rdatatype.to_text(self.covers) + ')'
+        if self.deleting is not None:
+            dtext = ' delete=' + dns.rdataclass.to_text(self.deleting)
+        else:
+            dtext = ''
+        return '<DNS ' + str(self.name) + ' ' + \
+               dns.rdataclass.to_text(self.rdclass) + ' ' + \
+               dns.rdatatype.to_text(self.rdtype) + ctext + dtext + ' RRset>'
+
+    def __str__(self):
+        return self.to_text()
+
+    def __eq__(self, other):
+        if not isinstance(other, RRset):
+            return False
+        if self.name != other.name:
+            return False
+        return super(RRset, self).__eq__(other)
+
+    def match(self, name, rdclass, rdtype, covers, deleting=None):
+        """Returns ``True`` if this rrset matches the specified class, type,
+        covers, and deletion state.
+        """
+
+        if not super(RRset, self).match(rdclass, rdtype, covers):
+            return False
+        if self.name != name or self.deleting != deleting:
+            return False
+        return True
+
+    def to_text(self, origin=None, relativize=True, **kw):
+        """Convert the RRset into DNS master file format.
+
+        See ``dns.name.Name.choose_relativity`` for more information
+        on how *origin* and *relativize* determine the way names
+        are emitted.
+
+        Any additional keyword arguments are passed on to the rdata
+        ``to_text()`` method.
+
+        *origin*, a ``dns.name.Name`` or ``None``, the origin for relative
+        names.
+
+        *relativize*, a ``bool``. If ``True``, names will be relativized
+        to *origin*.
+        """
+
+        return super(RRset, self).to_text(self.name, origin, relativize,
+                                          self.deleting, **kw)
+
+    def to_wire(self, file, compress=None, origin=None, **kw):
+        """Convert the RRset to wire format.
+
+        All keyword arguments are passed to ``dns.rdataset.to_wire()``; see
+        that function for details.
+
+        Returns an ``int``, the number of records emitted.
+        """
+
+        return super(RRset, self).to_wire(self.name, file, compress, origin,
+                                          self.deleting, **kw)
+
+    def to_rdataset(self):
+        """Convert an RRset into an Rdataset.
+
+        Returns a ``dns.rdataset.Rdataset``.
+        """
+        return dns.rdataset.from_rdata_list(self.ttl, list(self))
+
+
+def from_text_list(name, ttl, rdclass, rdtype, text_rdatas,
+                   idna_codec=None):
+    """Create an RRset with the specified name, TTL, class, and type, and with
+    the specified list of rdatas in text format.
+
+    Returns a ``dns.rrset.RRset`` object.
+    """
+
+    if isinstance(name, string_types):
+        name = dns.name.from_text(name, None, idna_codec=idna_codec)
+    if isinstance(rdclass, string_types):
+        rdclass = dns.rdataclass.from_text(rdclass)
+    if isinstance(rdtype, string_types):
+        rdtype = dns.rdatatype.from_text(rdtype)
+    r = RRset(name, rdclass, rdtype)
+    r.update_ttl(ttl)
+    for t in text_rdatas:
+        rd = dns.rdata.from_text(r.rdclass, r.rdtype, t)
+        r.add(rd)
+    return r
+
+
+def from_text(name, ttl, rdclass, rdtype, *text_rdatas):
+    """Create an RRset with the specified name, TTL, class, and type and with
+    the specified rdatas in text format.
+
+    Returns a ``dns.rrset.RRset`` object.
+    """
+
+    return from_text_list(name, ttl, rdclass, rdtype, text_rdatas)
+
+
+def from_rdata_list(name, ttl, rdatas, idna_codec=None):
+    """Create an RRset with the specified name and TTL, and with
+    the specified list of rdata objects.
+
+    Returns a ``dns.rrset.RRset`` object.
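+
+    A minimal sketch (editor's illustration, not upstream; the name and
+    address are placeholders):
+
+        import dns.rdata
+        import dns.rdataclass
+        import dns.rdatatype
+        import dns.rrset
+
+        rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A,
+                                 '192.0.2.1')
+        rrs = dns.rrset.from_rdata_list('mail.example.', 3600, [rd])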
+ """ + + if isinstance(name, string_types): + name = dns.name.from_text(name, None, idna_codec=idna_codec) + + if len(rdatas) == 0: + raise ValueError("rdata list must not be empty") + r = None + for rd in rdatas: + if r is None: + r = RRset(name, rd.rdclass, rd.rdtype) + r.update_ttl(ttl) + r.add(rd) + return r + + +def from_rdata(name, ttl, *rdatas): + """Create an RRset with the specified name and TTL, and with + the specified rdata objects. + + Returns a ``dns.rrset.RRset`` object. + """ + + return from_rdata_list(name, ttl, rdatas) diff --git a/openpype/vendor/python/python_2/dns/set.py b/openpype/vendor/python/python_2/dns/set.py new file mode 100644 index 0000000000..81329bf457 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/set.py @@ -0,0 +1,261 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +class Set(object): + + """A simple set class. + + This class was originally used to deal with sets being missing in + ancient versions of python, but dnspython will continue to use it + as these sets are based on lists and are thus indexable, and this + ability is widely used in dnspython applications. + """ + + __slots__ = ['items'] + + def __init__(self, items=None): + """Initialize the set. + + *items*, an iterable or ``None``, the initial set of items. + """ + + self.items = [] + if items is not None: + for item in items: + self.add(item) + + def __repr__(self): + return "dns.simpleset.Set(%s)" % repr(self.items) + + def add(self, item): + """Add an item to the set. + """ + + if item not in self.items: + self.items.append(item) + + def remove(self, item): + """Remove an item from the set. + """ + + self.items.remove(item) + + def discard(self, item): + """Remove an item from the set if present. + """ + + try: + self.items.remove(item) + except ValueError: + pass + + def _clone(self): + """Make a (shallow) copy of the set. + + There is a 'clone protocol' that subclasses of this class + should use. To make a copy, first call your super's _clone() + method, and use the object returned as the new instance. Then + make shallow copies of the attributes defined in the subclass. + + This protocol allows us to write the set algorithms that + return new instances (e.g. union) once, and keep using them in + subclasses. + """ + + cls = self.__class__ + obj = cls.__new__(cls) + obj.items = list(self.items) + return obj + + def __copy__(self): + """Make a (shallow) copy of the set. + """ + + return self._clone() + + def copy(self): + """Make a (shallow) copy of the set. + """ + + return self._clone() + + def union_update(self, other): + """Update the set, adding any elements from other which are not + already in the set. 
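+
+        For illustration (editor's sketch, not upstream):
+
+            a = Set([1, 2])
+            a.union_update(Set([2, 3]))
+            # a.items is now [1, 2, 3]; insertion order is preserved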
+ """ + + if not isinstance(other, Set): + raise ValueError('other must be a Set instance') + if self is other: + return + for item in other.items: + self.add(item) + + def intersection_update(self, other): + """Update the set, removing any elements from other which are not + in both sets. + """ + + if not isinstance(other, Set): + raise ValueError('other must be a Set instance') + if self is other: + return + # we make a copy of the list so that we can remove items from + # the list without breaking the iterator. + for item in list(self.items): + if item not in other.items: + self.items.remove(item) + + def difference_update(self, other): + """Update the set, removing any elements from other which are in + the set. + """ + + if not isinstance(other, Set): + raise ValueError('other must be a Set instance') + if self is other: + self.items = [] + else: + for item in other.items: + self.discard(item) + + def union(self, other): + """Return a new set which is the union of ``self`` and ``other``. + + Returns the same Set type as this set. + """ + + obj = self._clone() + obj.union_update(other) + return obj + + def intersection(self, other): + """Return a new set which is the intersection of ``self`` and + ``other``. + + Returns the same Set type as this set. + """ + + obj = self._clone() + obj.intersection_update(other) + return obj + + def difference(self, other): + """Return a new set which ``self`` - ``other``, i.e. the items + in ``self`` which are not also in ``other``. + + Returns the same Set type as this set. + """ + + obj = self._clone() + obj.difference_update(other) + return obj + + def __or__(self, other): + return self.union(other) + + def __and__(self, other): + return self.intersection(other) + + def __add__(self, other): + return self.union(other) + + def __sub__(self, other): + return self.difference(other) + + def __ior__(self, other): + self.union_update(other) + return self + + def __iand__(self, other): + self.intersection_update(other) + return self + + def __iadd__(self, other): + self.union_update(other) + return self + + def __isub__(self, other): + self.difference_update(other) + return self + + def update(self, other): + """Update the set, adding any elements from other which are not + already in the set. + + *other*, the collection of items with which to update the set, which + may be any iterable type. + """ + + for item in other: + self.add(item) + + def clear(self): + """Make the set empty.""" + self.items = [] + + def __eq__(self, other): + # Yes, this is inefficient but the sets we're dealing with are + # usually quite small, so it shouldn't hurt too much. + for item in self.items: + if item not in other.items: + return False + for item in other.items: + if item not in self.items: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + + def __len__(self): + return len(self.items) + + def __iter__(self): + return iter(self.items) + + def __getitem__(self, i): + return self.items[i] + + def __delitem__(self, i): + del self.items[i] + + def issubset(self, other): + """Is this set a subset of *other*? + + Returns a ``bool``. + """ + + if not isinstance(other, Set): + raise ValueError('other must be a Set instance') + for item in self.items: + if item not in other.items: + return False + return True + + def issuperset(self, other): + """Is this set a superset of *other*? + + Returns a ``bool``. 
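+
+        For illustration (editor's sketch, not upstream):
+
+            Set([1, 2, 3]).issuperset(Set([1, 2]))    # True
+            Set([1, 2]).issuperset(Set([1, 4]))       # False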
+ """ + + if not isinstance(other, Set): + raise ValueError('other must be a Set instance') + for item in other.items: + if item not in self.items: + return False + return True diff --git a/openpype/vendor/python/python_2/dns/tokenizer.py b/openpype/vendor/python/python_2/dns/tokenizer.py new file mode 100644 index 0000000000..880b71ce7a --- /dev/null +++ b/openpype/vendor/python/python_2/dns/tokenizer.py @@ -0,0 +1,571 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Tokenize DNS master file format""" + +from io import StringIO +import sys + +import dns.exception +import dns.name +import dns.ttl +from ._compat import long, text_type, binary_type + +_DELIMITERS = { + ' ': True, + '\t': True, + '\n': True, + ';': True, + '(': True, + ')': True, + '"': True} + +_QUOTING_DELIMITERS = {'"': True} + +EOF = 0 +EOL = 1 +WHITESPACE = 2 +IDENTIFIER = 3 +QUOTED_STRING = 4 +COMMENT = 5 +DELIMITER = 6 + + +class UngetBufferFull(dns.exception.DNSException): + """An attempt was made to unget a token when the unget buffer was full.""" + + +class Token(object): + """A DNS master file format token. + + ttype: The token type + value: The token value + has_escape: Does the token value contain escapes? 
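+
+    Editor's sketch (not upstream): a Token compares by type and value and
+    still unpacks like the old-style (type, value) tuples:
+
+        tok = Token(IDENTIFIER, 'example')
+        (ttype, value) = tok    # tuple-style unpacking
+        assert tok.is_identifier()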
+ """ + + def __init__(self, ttype, value='', has_escape=False): + """Initialize a token instance.""" + + self.ttype = ttype + self.value = value + self.has_escape = has_escape + + def is_eof(self): + return self.ttype == EOF + + def is_eol(self): + return self.ttype == EOL + + def is_whitespace(self): + return self.ttype == WHITESPACE + + def is_identifier(self): + return self.ttype == IDENTIFIER + + def is_quoted_string(self): + return self.ttype == QUOTED_STRING + + def is_comment(self): + return self.ttype == COMMENT + + def is_delimiter(self): + return self.ttype == DELIMITER + + def is_eol_or_eof(self): + return self.ttype == EOL or self.ttype == EOF + + def __eq__(self, other): + if not isinstance(other, Token): + return False + return (self.ttype == other.ttype and + self.value == other.value) + + def __ne__(self, other): + if not isinstance(other, Token): + return True + return (self.ttype != other.ttype or + self.value != other.value) + + def __str__(self): + return '%d "%s"' % (self.ttype, self.value) + + def unescape(self): + if not self.has_escape: + return self + unescaped = '' + l = len(self.value) + i = 0 + while i < l: + c = self.value[i] + i += 1 + if c == '\\': + if i >= l: + raise dns.exception.UnexpectedEnd + c = self.value[i] + i += 1 + if c.isdigit(): + if i >= l: + raise dns.exception.UnexpectedEnd + c2 = self.value[i] + i += 1 + if i >= l: + raise dns.exception.UnexpectedEnd + c3 = self.value[i] + i += 1 + if not (c2.isdigit() and c3.isdigit()): + raise dns.exception.SyntaxError + c = chr(int(c) * 100 + int(c2) * 10 + int(c3)) + unescaped += c + return Token(self.ttype, unescaped) + + # compatibility for old-style tuple tokens + + def __len__(self): + return 2 + + def __iter__(self): + return iter((self.ttype, self.value)) + + def __getitem__(self, i): + if i == 0: + return self.ttype + elif i == 1: + return self.value + else: + raise IndexError + + +class Tokenizer(object): + """A DNS master file format tokenizer. + + A token object is basically a (type, value) tuple. The valid + types are EOF, EOL, WHITESPACE, IDENTIFIER, QUOTED_STRING, + COMMENT, and DELIMITER. + + file: The file to tokenize + + ungotten_char: The most recently ungotten character, or None. + + ungotten_token: The most recently ungotten token, or None. + + multiline: The current multiline level. This value is increased + by one every time a '(' delimiter is read, and decreased by one every time + a ')' delimiter is read. + + quoting: This variable is true if the tokenizer is currently + reading a quoted string. + + eof: This variable is true if the tokenizer has encountered EOF. + + delimiters: The current delimiter dictionary. + + line_number: The current line number + + filename: A filename that will be returned by the where() method. + """ + + def __init__(self, f=sys.stdin, filename=None): + """Initialize a tokenizer instance. + + f: The file to tokenize. The default is sys.stdin. + This parameter may also be a string, in which case the tokenizer + will take its input from the contents of the string. + + filename: the name of the filename that the where() method + will return. 
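+
+        A minimal sketch (editor's illustration, not upstream; the record
+        text is a placeholder):
+
+            tok = Tokenizer('example. 3600 IN A 192.0.2.1')
+            first = tok.get()    # an IDENTIFIER token, value 'example.'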
+        """
+
+        if isinstance(f, text_type):
+            f = StringIO(f)
+            if filename is None:
+                filename = '<string>'
+        elif isinstance(f, binary_type):
+            f = StringIO(f.decode())
+            if filename is None:
+                filename = '<string>'
+        else:
+            if filename is None:
+                if f is sys.stdin:
+                    filename = '<stdin>'
+                else:
+                    filename = '<file>'
+        self.file = f
+        self.ungotten_char = None
+        self.ungotten_token = None
+        self.multiline = 0
+        self.quoting = False
+        self.eof = False
+        self.delimiters = _DELIMITERS
+        self.line_number = 1
+        self.filename = filename
+
+    def _get_char(self):
+        """Read a character from input.
+        """
+
+        if self.ungotten_char is None:
+            if self.eof:
+                c = ''
+            else:
+                c = self.file.read(1)
+                if c == '':
+                    self.eof = True
+                elif c == '\n':
+                    self.line_number += 1
+        else:
+            c = self.ungotten_char
+            self.ungotten_char = None
+        return c
+
+    def where(self):
+        """Return the current location in the input.
+
+        Returns a (string, int) tuple. The first item is the filename of
+        the input, the second is the current line number.
+        """
+
+        return (self.filename, self.line_number)
+
+    def _unget_char(self, c):
+        """Unget a character.
+
+        The unget buffer for characters is only one character large; it is
+        an error to try to unget a character when the unget buffer is not
+        empty.
+
+        c: the character to unget
+        raises UngetBufferFull: there is already an ungotten char
+        """
+
+        if self.ungotten_char is not None:
+            raise UngetBufferFull
+        self.ungotten_char = c
+
+    def skip_whitespace(self):
+        """Consume input until a non-whitespace character is encountered.
+
+        The non-whitespace character is then ungotten, and the number of
+        whitespace characters consumed is returned.
+
+        If the tokenizer is in multiline mode, then newlines are whitespace.
+
+        Returns the number of characters skipped.
+        """
+
+        skipped = 0
+        while True:
+            c = self._get_char()
+            if c != ' ' and c != '\t':
+                if (c != '\n') or not self.multiline:
+                    self._unget_char(c)
+                    return skipped
+            skipped += 1
+
+    def get(self, want_leading=False, want_comment=False):
+        """Get the next token.
+
+        want_leading: If True, return a WHITESPACE token if the
+        first character read is whitespace. The default is False.
+
+        want_comment: If True, return a COMMENT token if the
+        first token read is a comment. The default is False.
+
+        Raises dns.exception.UnexpectedEnd: input ended prematurely
+
+        Raises dns.exception.SyntaxError: input was badly formed
+
+        Returns a Token.
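+
+        Editor's sketch (not upstream): draining a tokenizer while keeping
+        comments:
+
+            tok = Tokenizer('a b    ; a comment')
+            while True:
+                t = tok.get(want_comment=True)
+                if t.is_eof():
+                    break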
+        """
+
+        if self.ungotten_token is not None:
+            token = self.ungotten_token
+            self.ungotten_token = None
+            if token.is_whitespace():
+                if want_leading:
+                    return token
+            elif token.is_comment():
+                if want_comment:
+                    return token
+            else:
+                return token
+        skipped = self.skip_whitespace()
+        if want_leading and skipped > 0:
+            return Token(WHITESPACE, ' ')
+        token = ''
+        ttype = IDENTIFIER
+        has_escape = False
+        while True:
+            c = self._get_char()
+            if c == '' or c in self.delimiters:
+                if c == '' and self.quoting:
+                    raise dns.exception.UnexpectedEnd
+                if token == '' and ttype != QUOTED_STRING:
+                    if c == '(':
+                        self.multiline += 1
+                        self.skip_whitespace()
+                        continue
+                    elif c == ')':
+                        if self.multiline <= 0:
+                            raise dns.exception.SyntaxError
+                        self.multiline -= 1
+                        self.skip_whitespace()
+                        continue
+                    elif c == '"':
+                        if not self.quoting:
+                            self.quoting = True
+                            self.delimiters = _QUOTING_DELIMITERS
+                            ttype = QUOTED_STRING
+                            continue
+                        else:
+                            self.quoting = False
+                            self.delimiters = _DELIMITERS
+                            self.skip_whitespace()
+                            continue
+                    elif c == '\n':
+                        return Token(EOL, '\n')
+                    elif c == ';':
+                        while 1:
+                            c = self._get_char()
+                            if c == '\n' or c == '':
+                                break
+                            token += c
+                        if want_comment:
+                            self._unget_char(c)
+                            return Token(COMMENT, token)
+                        elif c == '':
+                            if self.multiline:
+                                raise dns.exception.SyntaxError(
+                                    'unbalanced parentheses')
+                            return Token(EOF)
+                        elif self.multiline:
+                            self.skip_whitespace()
+                            token = ''
+                            continue
+                        else:
+                            return Token(EOL, '\n')
+                    else:
+                        # This code exists in case we ever want a
+                        # delimiter to be returned. It never produces
+                        # a token currently.
+                        token = c
+                        ttype = DELIMITER
+                else:
+                    self._unget_char(c)
+                break
+            elif self.quoting:
+                if c == '\\':
+                    c = self._get_char()
+                    if c == '':
+                        raise dns.exception.UnexpectedEnd
+                    if c.isdigit():
+                        c2 = self._get_char()
+                        if c2 == '':
+                            raise dns.exception.UnexpectedEnd
+                        c3 = self._get_char()
+                        if c3 == '':
+                            raise dns.exception.UnexpectedEnd
+                        if not (c2.isdigit() and c3.isdigit()):
+                            raise dns.exception.SyntaxError
+                        c = chr(int(c) * 100 + int(c2) * 10 + int(c3))
+                elif c == '\n':
+                    raise dns.exception.SyntaxError('newline in quoted string')
+            elif c == '\\':
+                #
+                # It's an escape. Put it and the next character into
+                # the token; it will be checked later for goodness.
+                #
+                token += c
+                has_escape = True
+                c = self._get_char()
+                if c == '' or c == '\n':
+                    raise dns.exception.UnexpectedEnd
+            token += c
+        if token == '' and ttype != QUOTED_STRING:
+            if self.multiline:
+                raise dns.exception.SyntaxError('unbalanced parentheses')
+            ttype = EOF
+        return Token(ttype, token, has_escape)
+
+    def unget(self, token):
+        """Unget a token.
+
+        The unget buffer for tokens is only one token large; it is
+        an error to try to unget a token when the unget buffer is not
+        empty.
+
+        token: the token to unget
+
+        Raises UngetBufferFull: there is already an ungotten token
+        """
+
+        if self.ungotten_token is not None:
+            raise UngetBufferFull
+        self.ungotten_token = token
+
+    def next(self):
+        """Return the next item in an iteration.
+
+        Returns a Token.
+        """
+
+        token = self.get()
+        if token.is_eof():
+            raise StopIteration
+        return token
+
+    __next__ = next
+
+    def __iter__(self):
+        return self
+
+    # Helpers
+
+    def get_int(self, base=10):
+        """Read the next token and interpret it as an integer.
+
+        Raises dns.exception.SyntaxError if not an integer.
+
+        Returns an int.
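+
+        For example (editor's illustration, not upstream):
+
+            Tokenizer('120').get_int()          # 120
+            Tokenizer('10').get_int(base=16)    # 16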
+        """
+
+        token = self.get().unescape()
+        if not token.is_identifier():
+            raise dns.exception.SyntaxError('expecting an identifier')
+        if not token.value.isdigit():
+            raise dns.exception.SyntaxError('expecting an integer')
+        return int(token.value, base)
+
+    def get_uint8(self):
+        """Read the next token and interpret it as an 8-bit unsigned
+        integer.
+
+        Raises dns.exception.SyntaxError if not an 8-bit unsigned integer.
+
+        Returns an int.
+        """
+
+        value = self.get_int()
+        if value < 0 or value > 255:
+            raise dns.exception.SyntaxError(
+                '%d is not an unsigned 8-bit integer' % value)
+        return value
+
+    def get_uint16(self, base=10):
+        """Read the next token and interpret it as a 16-bit unsigned
+        integer.
+
+        Raises dns.exception.SyntaxError if not a 16-bit unsigned integer.
+
+        Returns an int.
+        """
+
+        value = self.get_int(base=base)
+        if value < 0 or value > 65535:
+            if base == 8:
+                raise dns.exception.SyntaxError(
+                    '%o is not an octal unsigned 16-bit integer' % value)
+            else:
+                raise dns.exception.SyntaxError(
+                    '%d is not an unsigned 16-bit integer' % value)
+        return value
+
+    def get_uint32(self):
+        """Read the next token and interpret it as a 32-bit unsigned
+        integer.
+
+        Raises dns.exception.SyntaxError if not a 32-bit unsigned integer.
+
+        Returns an int.
+        """
+
+        token = self.get().unescape()
+        if not token.is_identifier():
+            raise dns.exception.SyntaxError('expecting an identifier')
+        if not token.value.isdigit():
+            raise dns.exception.SyntaxError('expecting an integer')
+        value = long(token.value)
+        if value < 0 or value > long(4294967295):
+            raise dns.exception.SyntaxError(
+                '%d is not an unsigned 32-bit integer' % value)
+        return value
+
+    def get_string(self, origin=None):
+        """Read the next token and interpret it as a string.
+
+        Raises dns.exception.SyntaxError if not a string.
+
+        Returns a string.
+        """
+
+        token = self.get().unescape()
+        if not (token.is_identifier() or token.is_quoted_string()):
+            raise dns.exception.SyntaxError('expecting a string')
+        return token.value
+
+    def get_identifier(self, origin=None):
+        """Read the next token, which should be an identifier.
+
+        Raises dns.exception.SyntaxError if not an identifier.
+
+        Returns a string.
+        """
+
+        token = self.get().unescape()
+        if not token.is_identifier():
+            raise dns.exception.SyntaxError('expecting an identifier')
+        return token.value
+
+    def get_name(self, origin=None):
+        """Read the next token and interpret it as a DNS name.
+
+        Raises dns.exception.SyntaxError if not a name.
+
+        Returns a dns.name.Name.
+        """
+
+        token = self.get()
+        if not token.is_identifier():
+            raise dns.exception.SyntaxError('expecting an identifier')
+        return dns.name.from_text(token.value, origin)
+
+    def get_eol(self):
+        """Read the next token and raise an exception if it isn't EOL or
+        EOF.
+
+        Returns a string.
+        """
+
+        token = self.get()
+        if not token.is_eol_or_eof():
+            raise dns.exception.SyntaxError(
+                'expected EOL or EOF, got %d "%s"' % (token.ttype,
+                                                      token.value))
+        return token.value
+
+    def get_ttl(self):
+        """Read the next token and interpret it as a DNS TTL.
+
+        Raises dns.exception.SyntaxError or dns.ttl.BadTTL if not an
+        identifier or badly formed.
+
+        Returns an int.
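+
+        For example (editor's illustration, not upstream), the BIND unit
+        syntax is accepted:
+
+            Tokenizer('1h30m').get_ttl()    # 5400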
+ """ + + token = self.get().unescape() + if not token.is_identifier(): + raise dns.exception.SyntaxError('expecting an identifier') + return dns.ttl.from_text(token.value) diff --git a/openpype/vendor/python/python_2/dns/tsig.py b/openpype/vendor/python/python_2/dns/tsig.py new file mode 100644 index 0000000000..3daa387855 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/tsig.py @@ -0,0 +1,236 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS TSIG support.""" + +import hashlib +import hmac +import struct + +import dns.exception +import dns.rdataclass +import dns.name +from ._compat import long, string_types, text_type + +class BadTime(dns.exception.DNSException): + + """The current time is not within the TSIG's validity time.""" + + +class BadSignature(dns.exception.DNSException): + + """The TSIG signature fails to verify.""" + + +class PeerError(dns.exception.DNSException): + + """Base class for all TSIG errors generated by the remote peer""" + + +class PeerBadKey(PeerError): + + """The peer didn't know the key we used""" + + +class PeerBadSignature(PeerError): + + """The peer didn't like the signature we sent""" + + +class PeerBadTime(PeerError): + + """The peer didn't like the time we sent""" + + +class PeerBadTruncation(PeerError): + + """The peer didn't like amount of truncation in the TSIG we sent""" + +# TSIG Algorithms + +HMAC_MD5 = dns.name.from_text("HMAC-MD5.SIG-ALG.REG.INT") +HMAC_SHA1 = dns.name.from_text("hmac-sha1") +HMAC_SHA224 = dns.name.from_text("hmac-sha224") +HMAC_SHA256 = dns.name.from_text("hmac-sha256") +HMAC_SHA384 = dns.name.from_text("hmac-sha384") +HMAC_SHA512 = dns.name.from_text("hmac-sha512") + +_hashes = { + HMAC_SHA224: hashlib.sha224, + HMAC_SHA256: hashlib.sha256, + HMAC_SHA384: hashlib.sha384, + HMAC_SHA512: hashlib.sha512, + HMAC_SHA1: hashlib.sha1, + HMAC_MD5: hashlib.md5, +} + +default_algorithm = HMAC_MD5 + +BADSIG = 16 +BADKEY = 17 +BADTIME = 18 +BADTRUNC = 22 + + +def sign(wire, keyname, secret, time, fudge, original_id, error, + other_data, request_mac, ctx=None, multi=False, first=True, + algorithm=default_algorithm): + """Return a (tsig_rdata, mac, ctx) tuple containing the HMAC TSIG rdata + for the input parameters, the HMAC MAC calculated by applying the + TSIG signature algorithm, and the TSIG digest context. 
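+
+    A hedged sketch of a typical first-signature call (editor's addition,
+    not upstream; ``wire``, ``keyname``, ``secret``, ``when`` and ``qid``
+    are assumed placeholders):
+
+        (tsig_rdata, mac, ctx) = sign(wire, keyname, secret, when, 300,
+                                      qid, 0, b'', b'')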
+ @rtype: (string, string, hmac.HMAC object) + @raises ValueError: I{other_data} is too long + @raises NotImplementedError: I{algorithm} is not supported + """ + + if isinstance(other_data, text_type): + other_data = other_data.encode() + (algorithm_name, digestmod) = get_algorithm(algorithm) + if first: + ctx = hmac.new(secret, digestmod=digestmod) + ml = len(request_mac) + if ml > 0: + ctx.update(struct.pack('!H', ml)) + ctx.update(request_mac) + id = struct.pack('!H', original_id) + ctx.update(id) + ctx.update(wire[2:]) + if first: + ctx.update(keyname.to_digestable()) + ctx.update(struct.pack('!H', dns.rdataclass.ANY)) + ctx.update(struct.pack('!I', 0)) + long_time = time + long(0) + upper_time = (long_time >> 32) & long(0xffff) + lower_time = long_time & long(0xffffffff) + time_mac = struct.pack('!HIH', upper_time, lower_time, fudge) + pre_mac = algorithm_name + time_mac + ol = len(other_data) + if ol > 65535: + raise ValueError('TSIG Other Data is > 65535 bytes') + post_mac = struct.pack('!HH', error, ol) + other_data + if first: + ctx.update(pre_mac) + ctx.update(post_mac) + else: + ctx.update(time_mac) + mac = ctx.digest() + mpack = struct.pack('!H', len(mac)) + tsig_rdata = pre_mac + mpack + mac + id + post_mac + if multi: + ctx = hmac.new(secret, digestmod=digestmod) + ml = len(mac) + ctx.update(struct.pack('!H', ml)) + ctx.update(mac) + else: + ctx = None + return (tsig_rdata, mac, ctx) + + +def hmac_md5(wire, keyname, secret, time, fudge, original_id, error, + other_data, request_mac, ctx=None, multi=False, first=True, + algorithm=default_algorithm): + return sign(wire, keyname, secret, time, fudge, original_id, error, + other_data, request_mac, ctx, multi, first, algorithm) + + +def validate(wire, keyname, secret, now, request_mac, tsig_start, tsig_rdata, + tsig_rdlen, ctx=None, multi=False, first=True): + """Validate the specified TSIG rdata against the other input parameters. + + @raises FormError: The TSIG is badly formed. + @raises BadTime: There is too much time skew between the client and the + server. 
+ @raises BadSignature: The TSIG signature did not validate + @rtype: hmac.HMAC object""" + + (adcount,) = struct.unpack("!H", wire[10:12]) + if adcount == 0: + raise dns.exception.FormError + adcount -= 1 + new_wire = wire[0:10] + struct.pack("!H", adcount) + wire[12:tsig_start] + current = tsig_rdata + (aname, used) = dns.name.from_wire(wire, current) + current = current + used + (upper_time, lower_time, fudge, mac_size) = \ + struct.unpack("!HIHH", wire[current:current + 10]) + time = ((upper_time + long(0)) << 32) + (lower_time + long(0)) + current += 10 + mac = wire[current:current + mac_size] + current += mac_size + (original_id, error, other_size) = \ + struct.unpack("!HHH", wire[current:current + 6]) + current += 6 + other_data = wire[current:current + other_size] + current += other_size + if current != tsig_rdata + tsig_rdlen: + raise dns.exception.FormError + if error != 0: + if error == BADSIG: + raise PeerBadSignature + elif error == BADKEY: + raise PeerBadKey + elif error == BADTIME: + raise PeerBadTime + elif error == BADTRUNC: + raise PeerBadTruncation + else: + raise PeerError('unknown TSIG error code %d' % error) + time_low = time - fudge + time_high = time + fudge + if now < time_low or now > time_high: + raise BadTime + (junk, our_mac, ctx) = sign(new_wire, keyname, secret, time, fudge, + original_id, error, other_data, + request_mac, ctx, multi, first, aname) + if our_mac != mac: + raise BadSignature + return ctx + + +def get_algorithm(algorithm): + """Returns the wire format string and the hash module to use for the + specified TSIG algorithm + + @rtype: (string, hash constructor) + @raises NotImplementedError: I{algorithm} is not supported + """ + + if isinstance(algorithm, string_types): + algorithm = dns.name.from_text(algorithm) + + try: + return (algorithm.to_digestable(), _hashes[algorithm]) + except KeyError: + raise NotImplementedError("TSIG algorithm " + str(algorithm) + + " is not supported") + + +def get_algorithm_and_mac(wire, tsig_rdata, tsig_rdlen): + """Return the tsig algorithm for the specified tsig_rdata + @raises FormError: The TSIG is badly formed. + """ + current = tsig_rdata + (aname, used) = dns.name.from_wire(wire, current) + current = current + used + (upper_time, lower_time, fudge, mac_size) = \ + struct.unpack("!HIHH", wire[current:current + 10]) + current += 10 + mac = wire[current:current + mac_size] + current += mac_size + if current > tsig_rdata + tsig_rdlen: + raise dns.exception.FormError + return (aname, mac) diff --git a/openpype/vendor/python/python_2/dns/tsigkeyring.py b/openpype/vendor/python/python_2/dns/tsigkeyring.py new file mode 100644 index 0000000000..5e5fe1cbe4 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/tsigkeyring.py @@ -0,0 +1,50 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""A place to store TSIG keys.""" + +from dns._compat import maybe_decode, maybe_encode + +import base64 + +import dns.name + + +def from_text(textring): + """Convert a dictionary containing (textual DNS name, base64 secret) pairs + into a binary keyring which has (dns.name.Name, binary secret) pairs. + @rtype: dict""" + + keyring = {} + for keytext in textring: + keyname = dns.name.from_text(keytext) + secret = base64.decodestring(maybe_encode(textring[keytext])) + keyring[keyname] = secret + return keyring + + +def to_text(keyring): + """Convert a dictionary containing (dns.name.Name, binary secret) pairs + into a text keyring which has (textual DNS name, base64 secret) pairs. + @rtype: dict""" + + textring = {} + for keyname in keyring: + keytext = maybe_decode(keyname.to_text()) + secret = maybe_decode(base64.encodestring(keyring[keyname])) + textring[keytext] = secret + return textring diff --git a/openpype/vendor/python/python_2/dns/ttl.py b/openpype/vendor/python/python_2/dns/ttl.py new file mode 100644 index 0000000000..4be16bee5b --- /dev/null +++ b/openpype/vendor/python/python_2/dns/ttl.py @@ -0,0 +1,70 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS TTL conversion.""" + +import dns.exception +from ._compat import long + + +class BadTTL(dns.exception.SyntaxError): + """DNS TTL value is not well-formed.""" + + +def from_text(text): + """Convert the text form of a TTL to an integer. + + The BIND 8 units syntax for TTLs (e.g. '1w6d4h3m10s') is supported. + + *text*, a ``text``, the textual TTL. + + Raises ``dns.ttl.BadTTL`` if the TTL is not well-formed. + + Returns an ``int``. 
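+
+    For example (editor's illustration, not upstream):
+
+        from_text('300')          # 300
+        from_text('1w6d4h3m10s')  # 1137790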
+ """ + + if text.isdigit(): + total = long(text) + else: + if not text[0].isdigit(): + raise BadTTL + total = long(0) + current = long(0) + for c in text: + if c.isdigit(): + current *= 10 + current += long(c) + else: + c = c.lower() + if c == 'w': + total += current * long(604800) + elif c == 'd': + total += current * long(86400) + elif c == 'h': + total += current * long(3600) + elif c == 'm': + total += current * long(60) + elif c == 's': + total += current + else: + raise BadTTL("unknown unit '%s'" % c) + current = 0 + if not current == 0: + raise BadTTL("trailing integer") + if total < long(0) or total > long(2147483647): + raise BadTTL("TTL should be between 0 and 2^31 - 1 (inclusive)") + return total diff --git a/openpype/vendor/python/python_2/dns/update.py b/openpype/vendor/python/python_2/dns/update.py new file mode 100644 index 0000000000..96a00d5dbe --- /dev/null +++ b/openpype/vendor/python/python_2/dns/update.py @@ -0,0 +1,279 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Dynamic Update Support""" + + +import dns.message +import dns.name +import dns.opcode +import dns.rdata +import dns.rdataclass +import dns.rdataset +import dns.tsig +from ._compat import string_types + + +class Update(dns.message.Message): + + def __init__(self, zone, rdclass=dns.rdataclass.IN, keyring=None, + keyname=None, keyalgorithm=dns.tsig.default_algorithm): + """Initialize a new DNS Update object. + + See the documentation of the Message class for a complete + description of the keyring dictionary. + + *zone*, a ``dns.name.Name`` or ``text``, the zone which is being + updated. + + *rdclass*, an ``int`` or ``text``, the class of the zone. + + *keyring*, a ``dict``, the TSIG keyring to use. If a + *keyring* is specified but a *keyname* is not, then the key + used will be the first key in the *keyring*. Note that the + order of keys in a dictionary is not defined, so applications + should supply a keyname when a keyring is used, unless they + know the keyring contains only one key. + + *keyname*, a ``dns.name.Name`` or ``None``, the name of the TSIG key + to use; defaults to ``None``. The key must be defined in the keyring. + + *keyalgorithm*, a ``dns.name.Name``, the TSIG algorithm to use. 
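+
+        A hedged usage sketch (editor's addition, not upstream; the zone,
+        key name and key material are placeholders):
+
+            import dns.tsigkeyring
+            import dns.update
+
+            keyring = dns.tsigkeyring.from_text(
+                {'keyname.': 'MTIzNDU2Nzg5MGFiY2RlZg=='})
+            update = dns.update.Update('example.', keyring=keyring,
+                                       keyname='keyname.')
+            update.add('www', 300, 'A', '192.0.2.1')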
+ """ + super(Update, self).__init__() + self.flags |= dns.opcode.to_flags(dns.opcode.UPDATE) + if isinstance(zone, string_types): + zone = dns.name.from_text(zone) + self.origin = zone + if isinstance(rdclass, string_types): + rdclass = dns.rdataclass.from_text(rdclass) + self.zone_rdclass = rdclass + self.find_rrset(self.question, self.origin, rdclass, dns.rdatatype.SOA, + create=True, force_unique=True) + if keyring is not None: + self.use_tsig(keyring, keyname, algorithm=keyalgorithm) + + def _add_rr(self, name, ttl, rd, deleting=None, section=None): + """Add a single RR to the update section.""" + + if section is None: + section = self.authority + covers = rd.covers() + rrset = self.find_rrset(section, name, self.zone_rdclass, rd.rdtype, + covers, deleting, True, True) + rrset.add(rd, ttl) + + def _add(self, replace, section, name, *args): + """Add records. + + *replace* is the replacement mode. If ``False``, + RRs are added to an existing RRset; if ``True``, the RRset + is replaced with the specified contents. The second + argument is the section to add to. The third argument + is always a name. The other arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... + """ + + if isinstance(name, string_types): + name = dns.name.from_text(name, None) + if isinstance(args[0], dns.rdataset.Rdataset): + for rds in args: + if replace: + self.delete(name, rds.rdtype) + for rd in rds: + self._add_rr(name, rds.ttl, rd, section=section) + else: + args = list(args) + ttl = int(args.pop(0)) + if isinstance(args[0], dns.rdata.Rdata): + if replace: + self.delete(name, args[0].rdtype) + for rd in args: + self._add_rr(name, ttl, rd, section=section) + else: + rdtype = args.pop(0) + if isinstance(rdtype, string_types): + rdtype = dns.rdatatype.from_text(rdtype) + if replace: + self.delete(name, rdtype) + for s in args: + rd = dns.rdata.from_text(self.zone_rdclass, rdtype, s, + self.origin) + self._add_rr(name, ttl, rd, section=section) + + def add(self, name, *args): + """Add records. + + The first argument is always a name. The other + arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... + """ + + self._add(False, self.authority, name, *args) + + def delete(self, name, *args): + """Delete records. + + The first argument is always a name. The other + arguments can be: + + - *empty* + + - rdataset... + + - rdata... + + - rdtype, [string...] + """ + + if isinstance(name, string_types): + name = dns.name.from_text(name, None) + if len(args) == 0: + self.find_rrset(self.authority, name, dns.rdataclass.ANY, + dns.rdatatype.ANY, dns.rdatatype.NONE, + dns.rdatatype.ANY, True, True) + elif isinstance(args[0], dns.rdataset.Rdataset): + for rds in args: + for rd in rds: + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + else: + args = list(args) + if isinstance(args[0], dns.rdata.Rdata): + for rd in args: + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + else: + rdtype = args.pop(0) + if isinstance(rdtype, string_types): + rdtype = dns.rdatatype.from_text(rdtype) + if len(args) == 0: + self.find_rrset(self.authority, name, + self.zone_rdclass, rdtype, + dns.rdatatype.NONE, + dns.rdataclass.ANY, + True, True) + else: + for s in args: + rd = dns.rdata.from_text(self.zone_rdclass, rdtype, s, + self.origin) + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + + def replace(self, name, *args): + """Replace records. + + The first argument is always a name. The other + arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... 
+
+        Note that if you want to replace the entire node, you should do
+        a delete of the name followed by one or more calls to add.
+        """
+
+        self._add(True, self.authority, name, *args)
+
+    def present(self, name, *args):
+        """Require that an owner name (and optionally an rdata type,
+        or specific rdataset) exists as a prerequisite to the
+        execution of the update.
+
+        The first argument is always a name.
+        The other arguments can be:
+
+        - rdataset...
+
+        - rdata...
+
+        - rdtype, string...
+        """
+
+        if isinstance(name, string_types):
+            name = dns.name.from_text(name, None)
+        if len(args) == 0:
+            self.find_rrset(self.answer, name,
+                            dns.rdataclass.ANY, dns.rdatatype.ANY,
+                            dns.rdatatype.NONE, None,
+                            True, True)
+        elif isinstance(args[0], dns.rdataset.Rdataset) or \
+            isinstance(args[0], dns.rdata.Rdata) or \
+                len(args) > 1:
+            if not isinstance(args[0], dns.rdataset.Rdataset):
+                # Add a 0 TTL
+                args = list(args)
+                args.insert(0, 0)
+            self._add(False, self.answer, name, *args)
+        else:
+            rdtype = args[0]
+            if isinstance(rdtype, string_types):
+                rdtype = dns.rdatatype.from_text(rdtype)
+            self.find_rrset(self.answer, name,
+                            dns.rdataclass.ANY, rdtype,
+                            dns.rdatatype.NONE, None,
+                            True, True)
+
+    def absent(self, name, rdtype=None):
+        """Require that an owner name (and optionally an rdata type) does
+        not exist as a prerequisite to the execution of the update."""
+
+        if isinstance(name, string_types):
+            name = dns.name.from_text(name, None)
+        if rdtype is None:
+            self.find_rrset(self.answer, name,
+                            dns.rdataclass.NONE, dns.rdatatype.ANY,
+                            dns.rdatatype.NONE, None,
+                            True, True)
+        else:
+            if isinstance(rdtype, string_types):
+                rdtype = dns.rdatatype.from_text(rdtype)
+            self.find_rrset(self.answer, name,
+                            dns.rdataclass.NONE, rdtype,
+                            dns.rdatatype.NONE, None,
+                            True, True)
+
+    def to_wire(self, origin=None, max_size=65535):
+        """Return a string containing the update in DNS compressed wire
+        format.
+
+        *origin*, a ``dns.name.Name`` or ``None``, the origin to be
+        appended to any relative names. If *origin* is ``None``, then
+        the origin of the ``dns.update.Update`` message object is used
+        (i.e. the *zone* parameter passed when the Update object was
+        created).
+
+        *max_size*, an ``int``, the maximum size of the wire format
+        output; the default is 65535.
+
+        Returns a ``binary``.
+        """
+
+        if origin is None:
+            origin = self.origin
+        return super(Update, self).to_wire(origin, max_size)
diff --git a/openpype/vendor/python/python_2/dns/version.py b/openpype/vendor/python/python_2/dns/version.py
new file mode 100644
index 0000000000..f116904b46
--- /dev/null
+++ b/openpype/vendor/python/python_2/dns/version.py
@@ -0,0 +1,43 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2003-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""dnspython release version information.""" + +#: MAJOR +MAJOR = 1 +#: MINOR +MINOR = 16 +#: MICRO +MICRO = 0 +#: RELEASELEVEL +RELEASELEVEL = 0x0f +#: SERIAL +SERIAL = 0 + +if RELEASELEVEL == 0x0f: + #: version + version = '%d.%d.%d' % (MAJOR, MINOR, MICRO) +elif RELEASELEVEL == 0x00: + version = '%d.%d.%dx%d' % \ + (MAJOR, MINOR, MICRO, SERIAL) +else: + version = '%d.%d.%d%x%d' % \ + (MAJOR, MINOR, MICRO, RELEASELEVEL, SERIAL) + +#: hexversion +hexversion = MAJOR << 24 | MINOR << 16 | MICRO << 8 | RELEASELEVEL << 4 | \ + SERIAL diff --git a/openpype/vendor/python/python_2/dns/wiredata.py b/openpype/vendor/python/python_2/dns/wiredata.py new file mode 100644 index 0000000000..ea3c1e67d6 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/wiredata.py @@ -0,0 +1,103 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2011,2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Wire Data Helper""" + +import dns.exception +from ._compat import binary_type, string_types, PY2 + +# Figure out what constant python passes for an unspecified slice bound. +# It's supposed to be sys.maxint, yet on 64-bit windows sys.maxint is 2^31 - 1 +# but Python uses 2^63 - 1 as the constant. Rather than making pointless +# extra comparisons, duplicating code, or weakening WireData, we just figure +# out what constant Python will use. 
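+#
+# For illustration (editor's note, not upstream): with the helper class
+# below,
+#
+#     _SliceUnspecifiedBound()[1:]
+#
+# evaluates to whatever stop value this interpreter passes for an
+# open-ended slice, and that is the constant _unspecified_bound captures.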
+ + +class _SliceUnspecifiedBound(binary_type): + + def __getitem__(self, key): + return key.stop + + if PY2: + def __getslice__(self, i, j): # pylint: disable=getslice-method + return self.__getitem__(slice(i, j)) + +_unspecified_bound = _SliceUnspecifiedBound()[1:] + + +class WireData(binary_type): + # WireData is a binary type with stricter slicing + + def __getitem__(self, key): + try: + if isinstance(key, slice): + # make sure we are not going outside of valid ranges, + # do stricter control of boundaries than python does + # by default + start = key.start + stop = key.stop + + if PY2: + if stop == _unspecified_bound: + # handle the case where the right bound is unspecified + stop = len(self) + + if start < 0 or stop < 0: + raise dns.exception.FormError + # If it's not an empty slice, access left and right bounds + # to make sure they're valid + if start != stop: + super(WireData, self).__getitem__(start) + super(WireData, self).__getitem__(stop - 1) + else: + for index in (start, stop): + if index is None: + continue + elif abs(index) > len(self): + raise dns.exception.FormError + + return WireData(super(WireData, self).__getitem__( + slice(start, stop))) + return bytearray(self.unwrap())[key] + except IndexError: + raise dns.exception.FormError + + if PY2: + def __getslice__(self, i, j): # pylint: disable=getslice-method + return self.__getitem__(slice(i, j)) + + def __iter__(self): + i = 0 + while 1: + try: + yield self[i] + i += 1 + except dns.exception.FormError: + raise StopIteration + + def unwrap(self): + return binary_type(self) + + +def maybe_wrap(wire): + if isinstance(wire, WireData): + return wire + elif isinstance(wire, binary_type): + return WireData(wire) + elif isinstance(wire, string_types): + return WireData(wire.encode()) + raise ValueError("unhandled type %s" % type(wire)) diff --git a/openpype/vendor/python/python_2/dns/zone.py b/openpype/vendor/python/python_2/dns/zone.py new file mode 100644 index 0000000000..1e2fe78168 --- /dev/null +++ b/openpype/vendor/python/python_2/dns/zone.py @@ -0,0 +1,1127 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+
+"""DNS Zones."""
+
+from __future__ import generators
+
+import sys
+import re
+import os
+from io import BytesIO
+
+import dns.exception
+import dns.name
+import dns.node
+import dns.rdataclass
+import dns.rdatatype
+import dns.rdata
+import dns.rdtypes.ANY.SOA
+import dns.rrset
+import dns.tokenizer
+import dns.ttl
+import dns.grange
+from ._compat import string_types, text_type, PY3
+
+
+class BadZone(dns.exception.DNSException):
+
+    """The DNS zone is malformed."""
+
+
+class NoSOA(BadZone):
+
+    """The DNS zone has no SOA RR at its origin."""
+
+
+class NoNS(BadZone):
+
+    """The DNS zone has no NS RRset at its origin."""
+
+
+class UnknownOrigin(BadZone):
+
+    """The DNS zone's origin is unknown."""
+
+
+class Zone(object):
+
+    """A DNS zone.
+
+    A Zone is a mapping from names to nodes. The zone object may be
+    treated like a Python dictionary, e.g. zone[name] will retrieve
+    the node associated with that name. The I{name} may be a
+    dns.name.Name object, or it may be a string. In either case,
+    if the name is relative it is treated as relative to the origin of
+    the zone.
+
+    @ivar rdclass: The zone's rdata class; the default is class IN.
+    @type rdclass: int
+    @ivar origin: The origin of the zone.
+    @type origin: dns.name.Name object
+    @ivar nodes: A dictionary mapping the names of nodes in the zone to the
+    nodes themselves.
+    @type nodes: dict
+    @ivar relativize: should names in the zone be relativized?
+    @type relativize: bool
+    @cvar node_factory: the factory used to create a new node
+    @type node_factory: class or callable
+    """
+
+    node_factory = dns.node.Node
+
+    __slots__ = ['rdclass', 'origin', 'nodes', 'relativize']
+
+    def __init__(self, origin, rdclass=dns.rdataclass.IN, relativize=True):
+        """Initialize a zone object.
+
+        @param origin: The origin of the zone.
+        @type origin: dns.name.Name object
+        @param rdclass: The zone's rdata class; the default is class IN.
+        @type rdclass: int"""
+
+        if origin is not None:
+            if isinstance(origin, string_types):
+                origin = dns.name.from_text(origin)
+            elif not isinstance(origin, dns.name.Name):
+                raise ValueError("origin parameter must be convertible to a "
+                                 "DNS name")
+            if not origin.is_absolute():
+                raise ValueError("origin parameter must be an absolute name")
+        self.origin = origin
+        self.rdclass = rdclass
+        self.nodes = {}
+        self.relativize = relativize
+
+    def __eq__(self, other):
+        """Two zones are equal if they have the same origin, class, and
+        nodes.
+        @rtype: bool
+        """
+
+        if not isinstance(other, Zone):
+            return False
+        if self.rdclass != other.rdclass or \
+           self.origin != other.origin or \
+           self.nodes != other.nodes:
+            return False
+        return True
+
+    def __ne__(self, other):
+        """Are two zones not equal?
+ @rtype: bool + """ + + return not self.__eq__(other) + + def _validate_name(self, name): + if isinstance(name, string_types): + name = dns.name.from_text(name, None) + elif not isinstance(name, dns.name.Name): + raise KeyError("name parameter must be convertible to a DNS name") + if name.is_absolute(): + if not name.is_subdomain(self.origin): + raise KeyError( + "name parameter must be a subdomain of the zone origin") + if self.relativize: + name = name.relativize(self.origin) + return name + + def __getitem__(self, key): + key = self._validate_name(key) + return self.nodes[key] + + def __setitem__(self, key, value): + key = self._validate_name(key) + self.nodes[key] = value + + def __delitem__(self, key): + key = self._validate_name(key) + del self.nodes[key] + + def __iter__(self): + return self.nodes.__iter__() + + def iterkeys(self): + if PY3: + return self.nodes.keys() # pylint: disable=dict-keys-not-iterating + else: + return self.nodes.iterkeys() # pylint: disable=dict-iter-method + + def keys(self): + return self.nodes.keys() # pylint: disable=dict-keys-not-iterating + + def itervalues(self): + if PY3: + return self.nodes.values() # pylint: disable=dict-values-not-iterating + else: + return self.nodes.itervalues() # pylint: disable=dict-iter-method + + def values(self): + return self.nodes.values() # pylint: disable=dict-values-not-iterating + + def items(self): + return self.nodes.items() # pylint: disable=dict-items-not-iterating + + iteritems = items + + def get(self, key): + key = self._validate_name(key) + return self.nodes.get(key) + + def __contains__(self, other): + return other in self.nodes + + def find_node(self, name, create=False): + """Find a node in the zone, possibly creating it. + + @param name: the name of the node to find + @type name: dns.name.Name object or string + @param create: should the node be created if it doesn't exist? + @type create: bool + @raises KeyError: the name is not known and create was not specified. + @rtype: dns.node.Node object + """ + + name = self._validate_name(name) + node = self.nodes.get(name) + if node is None: + if not create: + raise KeyError + node = self.node_factory() + self.nodes[name] = node + return node + + def get_node(self, name, create=False): + """Get a node in the zone, possibly creating it. + + This method is like L{find_node}, except it returns None instead + of raising an exception if the node does not exist and creation + has not been requested. + + @param name: the name of the node to find + @type name: dns.name.Name object or string + @param create: should the node be created if it doesn't exist? + @type create: bool + @rtype: dns.node.Node object or None + """ + + try: + node = self.find_node(name, create) + except KeyError: + node = None + return node + + def delete_node(self, name): + """Delete the specified node if it exists. + + It is not an error if the node does not exist. + """ + + name = self._validate_name(name) + if name in self.nodes: + del self.nodes[name] + + def find_rdataset(self, name, rdtype, covers=dns.rdatatype.NONE, + create=False): + """Look for rdata with the specified name and type in the zone, + and return an rdataset encapsulating it. + + The I{name}, I{rdtype}, and I{covers} parameters may be + strings, in which case they will be converted to their proper + type. + + The rdataset returned is not a copy; changes to it will change + the zone. + + KeyError is raised if the name or type are not found. + Use L{get_rdataset} if you want to have None returned instead. 
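+
+        A hedged sketch (editor's addition, not upstream; ``zone`` is an
+        assumed Zone instance):
+
+            rds = zone.find_rdataset('www', 'A', create=True)
+            rds.add(dns.rdata.from_text(zone.rdclass, dns.rdatatype.A,
+                                        '192.0.2.1'), 300)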
+ + @param name: the owner name to look for + @type name: DNS.name.Name object or string + @param rdtype: the rdata type desired + @type rdtype: int or string + @param covers: the covered type (defaults to None) + @type covers: int or string + @param create: should the node and rdataset be created if they do not + exist? + @type create: bool + @raises KeyError: the node or rdata could not be found + @rtype: dns.rdataset.Rdataset object + """ + + name = self._validate_name(name) + if isinstance(rdtype, string_types): + rdtype = dns.rdatatype.from_text(rdtype) + if isinstance(covers, string_types): + covers = dns.rdatatype.from_text(covers) + node = self.find_node(name, create) + return node.find_rdataset(self.rdclass, rdtype, covers, create) + + def get_rdataset(self, name, rdtype, covers=dns.rdatatype.NONE, + create=False): + """Look for rdata with the specified name and type in the zone, + and return an rdataset encapsulating it. + + The I{name}, I{rdtype}, and I{covers} parameters may be + strings, in which case they will be converted to their proper + type. + + The rdataset returned is not a copy; changes to it will change + the zone. + + None is returned if the name or type are not found. + Use L{find_rdataset} if you want to have KeyError raised instead. + + @param name: the owner name to look for + @type name: DNS.name.Name object or string + @param rdtype: the rdata type desired + @type rdtype: int or string + @param covers: the covered type (defaults to None) + @type covers: int or string + @param create: should the node and rdataset be created if they do not + exist? + @type create: bool + @rtype: dns.rdataset.Rdataset object or None + """ + + try: + rdataset = self.find_rdataset(name, rdtype, covers, create) + except KeyError: + rdataset = None + return rdataset + + def delete_rdataset(self, name, rdtype, covers=dns.rdatatype.NONE): + """Delete the rdataset matching I{rdtype} and I{covers}, if it + exists at the node specified by I{name}. + + The I{name}, I{rdtype}, and I{covers} parameters may be + strings, in which case they will be converted to their proper + type. + + It is not an error if the node does not exist, or if there is no + matching rdataset at the node. + + If the node has no rdatasets after the deletion, it will itself + be deleted. + + @param name: the owner name to look for + @type name: DNS.name.Name object or string + @param rdtype: the rdata type desired + @type rdtype: int or string + @param covers: the covered type (defaults to None) + @type covers: int or string + """ + + name = self._validate_name(name) + if isinstance(rdtype, string_types): + rdtype = dns.rdatatype.from_text(rdtype) + if isinstance(covers, string_types): + covers = dns.rdatatype.from_text(covers) + node = self.get_node(name) + if node is not None: + node.delete_rdataset(self.rdclass, rdtype, covers) + if len(node) == 0: + self.delete_node(name) + + def replace_rdataset(self, name, replacement): + """Replace an rdataset at name. + + It is not an error if there is no rdataset matching I{replacement}. + + Ownership of the I{replacement} object is transferred to the zone; + in other words, this method does not store a copy of I{replacement} + at the node, it stores I{replacement} itself. + + If the I{name} node does not exist, it is created. 
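+
+        For example, C{zone.replace_rdataset(name, rds)} installs I{rds}
+        itself (not a copy) at I{name}, replacing any existing rdataset of
+        the same type.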
+ + @param name: the owner name + @type name: DNS.name.Name object or string + @param replacement: the replacement rdataset + @type replacement: dns.rdataset.Rdataset + """ + + if replacement.rdclass != self.rdclass: + raise ValueError('replacement.rdclass != zone.rdclass') + node = self.find_node(name, True) + node.replace_rdataset(replacement) + + def find_rrset(self, name, rdtype, covers=dns.rdatatype.NONE): + """Look for rdata with the specified name and type in the zone, + and return an RRset encapsulating it. + + The I{name}, I{rdtype}, and I{covers} parameters may be + strings, in which case they will be converted to their proper + type. + + This method is less efficient than the similar + L{find_rdataset} because it creates an RRset instead of + returning the matching rdataset. It may be more convenient + for some uses since it returns an object which binds the owner + name to the rdata. + + This method may not be used to create new nodes or rdatasets; + use L{find_rdataset} instead. + + KeyError is raised if the name or type are not found. + Use L{get_rrset} if you want to have None returned instead. + + @param name: the owner name to look for + @type name: DNS.name.Name object or string + @param rdtype: the rdata type desired + @type rdtype: int or string + @param covers: the covered type (defaults to None) + @type covers: int or string + @raises KeyError: the node or rdata could not be found + @rtype: dns.rrset.RRset object + """ + + name = self._validate_name(name) + if isinstance(rdtype, string_types): + rdtype = dns.rdatatype.from_text(rdtype) + if isinstance(covers, string_types): + covers = dns.rdatatype.from_text(covers) + rdataset = self.nodes[name].find_rdataset(self.rdclass, rdtype, covers) + rrset = dns.rrset.RRset(name, self.rdclass, rdtype, covers) + rrset.update(rdataset) + return rrset + + def get_rrset(self, name, rdtype, covers=dns.rdatatype.NONE): + """Look for rdata with the specified name and type in the zone, + and return an RRset encapsulating it. + + The I{name}, I{rdtype}, and I{covers} parameters may be + strings, in which case they will be converted to their proper + type. + + This method is less efficient than the similar L{get_rdataset} + because it creates an RRset instead of returning the matching + rdataset. It may be more convenient for some uses since it + returns an object which binds the owner name to the rdata. + + This method may not be used to create new nodes or rdatasets; + use L{find_rdataset} instead. + + None is returned if the name or type are not found. + Use L{find_rrset} if you want to have KeyError raised instead. + + @param name: the owner name to look for + @type name: DNS.name.Name object or string + @param rdtype: the rdata type desired + @type rdtype: int or string + @param covers: the covered type (defaults to None) + @type covers: int or string + @rtype: dns.rrset.RRset object + """ + + try: + rrset = self.find_rrset(name, rdtype, covers) + except KeyError: + rrset = None + return rrset + + def iterate_rdatasets(self, rdtype=dns.rdatatype.ANY, + covers=dns.rdatatype.NONE): + """Return a generator which yields (name, rdataset) tuples for + all rdatasets in the zone which have the specified I{rdtype} + and I{covers}. If I{rdtype} is dns.rdatatype.ANY, the default, + then all rdatasets will be matched. 
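+
+        For example, C{zone.iterate_rdatasets('MX')} yields a
+        (name, rdataset) tuple for every MX rdataset in the zone.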
+ + @param rdtype: int or string + @type rdtype: int or string + @param covers: the covered type (defaults to None) + @type covers: int or string + """ + + if isinstance(rdtype, string_types): + rdtype = dns.rdatatype.from_text(rdtype) + if isinstance(covers, string_types): + covers = dns.rdatatype.from_text(covers) + for (name, node) in self.iteritems(): # pylint: disable=dict-iter-method + for rds in node: + if rdtype == dns.rdatatype.ANY or \ + (rds.rdtype == rdtype and rds.covers == covers): + yield (name, rds) + + def iterate_rdatas(self, rdtype=dns.rdatatype.ANY, + covers=dns.rdatatype.NONE): + """Return a generator which yields (name, ttl, rdata) tuples for + all rdatas in the zone which have the specified I{rdtype} + and I{covers}. If I{rdtype} is dns.rdatatype.ANY, the default, + then all rdatas will be matched. + + @param rdtype: int or string + @type rdtype: int or string + @param covers: the covered type (defaults to None) + @type covers: int or string + """ + + if isinstance(rdtype, string_types): + rdtype = dns.rdatatype.from_text(rdtype) + if isinstance(covers, string_types): + covers = dns.rdatatype.from_text(covers) + for (name, node) in self.iteritems(): # pylint: disable=dict-iter-method + for rds in node: + if rdtype == dns.rdatatype.ANY or \ + (rds.rdtype == rdtype and rds.covers == covers): + for rdata in rds: + yield (name, rds.ttl, rdata) + + def to_file(self, f, sorted=True, relativize=True, nl=None): + """Write a zone to a file. + + @param f: file or string. If I{f} is a string, it is treated + as the name of a file to open. + @param sorted: if True, the file will be written with the + names sorted in DNSSEC order from least to greatest. Otherwise + the names will be written in whatever order they happen to have + in the zone's dictionary. + @param relativize: if True, domain names in the output will be + relativized to the zone's origin (if possible). + @type relativize: bool + @param nl: The end of line string. If not specified, the + output will use the platform's native end-of-line marker (i.e. + LF on POSIX, CRLF on Windows, CR on Macintosh). + @type nl: string or None + """ + + if isinstance(f, string_types): + f = open(f, 'wb') + want_close = True + else: + want_close = False + + # must be in this way, f.encoding may contain None, or even attribute + # may not be there + file_enc = getattr(f, 'encoding', None) + if file_enc is None: + file_enc = 'utf-8' + + if nl is None: + nl_b = os.linesep.encode(file_enc) # binary mode, '\n' is not enough + nl = u'\n' + elif isinstance(nl, string_types): + nl_b = nl.encode(file_enc) + else: + nl_b = nl + nl = nl.decode() + + try: + if sorted: + names = list(self.keys()) + names.sort() + else: + names = self.iterkeys() # pylint: disable=dict-iter-method + for n in names: + l = self[n].to_text(n, origin=self.origin, + relativize=relativize) + if isinstance(l, text_type): + l_b = l.encode(file_enc) + else: + l_b = l + l = l.decode() + + try: + f.write(l_b) + f.write(nl_b) + except TypeError: # textual mode + f.write(l) + f.write(nl) + finally: + if want_close: + f.close() + + def to_text(self, sorted=True, relativize=True, nl=None): + """Return a zone's text as though it were written to a file. + + @param sorted: if True, the file will be written with the + names sorted in DNSSEC order from least to greatest. Otherwise + the names will be written in whatever order they happen to have + in the zone's dictionary. + @param relativize: if True, domain names in the output will be + relativized to the zone's origin (if possible). 
+ @type relativize: bool + @param nl: The end of line string. If not specified, the + output will use the platform's native end-of-line marker (i.e. + LF on POSIX, CRLF on Windows, CR on Macintosh). + @type nl: string or None + """ + temp_buffer = BytesIO() + self.to_file(temp_buffer, sorted, relativize, nl) + return_value = temp_buffer.getvalue() + temp_buffer.close() + return return_value + + def check_origin(self): + """Do some simple checking of the zone's origin. + + @raises dns.zone.NoSOA: there is no SOA RR + @raises dns.zone.NoNS: there is no NS RRset + @raises KeyError: there is no origin node + """ + if self.relativize: + name = dns.name.empty + else: + name = self.origin + if self.get_rdataset(name, dns.rdatatype.SOA) is None: + raise NoSOA + if self.get_rdataset(name, dns.rdatatype.NS) is None: + raise NoNS + + +class _MasterReader(object): + + """Read a DNS master file + + @ivar tok: The tokenizer + @type tok: dns.tokenizer.Tokenizer object + @ivar last_ttl: The last seen explicit TTL for an RR + @type last_ttl: int + @ivar last_ttl_known: Has last TTL been detected + @type last_ttl_known: bool + @ivar default_ttl: The default TTL from a $TTL directive or SOA RR + @type default_ttl: int + @ivar default_ttl_known: Has default TTL been detected + @type default_ttl_known: bool + @ivar last_name: The last name read + @type last_name: dns.name.Name object + @ivar current_origin: The current origin + @type current_origin: dns.name.Name object + @ivar relativize: should names in the zone be relativized? + @type relativize: bool + @ivar zone: the zone + @type zone: dns.zone.Zone object + @ivar saved_state: saved reader state (used when processing $INCLUDE) + @type saved_state: list of (tokenizer, current_origin, last_name, file, + last_ttl, last_ttl_known, default_ttl, default_ttl_known) tuples. + @ivar current_file: the file object of the $INCLUDed file being parsed + (None if no $INCLUDE is active). + @ivar allow_include: is $INCLUDE allowed? + @type allow_include: bool + @ivar check_origin: should sanity checks of the origin node be done? + The default is True. + @type check_origin: bool + """ + + def __init__(self, tok, origin, rdclass, relativize, zone_factory=Zone, + allow_include=False, check_origin=True): + if isinstance(origin, string_types): + origin = dns.name.from_text(origin) + self.tok = tok + self.current_origin = origin + self.relativize = relativize + self.last_ttl = 0 + self.last_ttl_known = False + self.default_ttl = 0 + self.default_ttl_known = False + self.last_name = self.current_origin + self.zone = zone_factory(origin, rdclass, relativize=relativize) + self.saved_state = [] + self.current_file = None + self.allow_include = allow_include + self.check_origin = check_origin + + def _eat_line(self): + while 1: + token = self.tok.get() + if token.is_eol_or_eof(): + break + + def _rr_line(self): + """Process one line from a DNS master file.""" + # Name + if self.current_origin is None: + raise UnknownOrigin + token = self.tok.get(want_leading=True) + if not token.is_whitespace(): + self.last_name = dns.name.from_text( + token.value, self.current_origin) + else: + token = self.tok.get() + if token.is_eol_or_eof(): + # treat leading WS followed by EOL/EOF as if they were EOL/EOF. 
+                return
+            self.tok.unget(token)
+        name = self.last_name
+        if not name.is_subdomain(self.zone.origin):
+            self._eat_line()
+            return
+        if self.relativize:
+            name = name.relativize(self.zone.origin)
+        token = self.tok.get()
+        if not token.is_identifier():
+            raise dns.exception.SyntaxError
+        # TTL
+        try:
+            ttl = dns.ttl.from_text(token.value)
+            self.last_ttl = ttl
+            self.last_ttl_known = True
+            token = self.tok.get()
+            if not token.is_identifier():
+                raise dns.exception.SyntaxError
+        except dns.ttl.BadTTL:
+            if not (self.last_ttl_known or self.default_ttl_known):
+                raise dns.exception.SyntaxError("Missing default TTL value")
+            if self.default_ttl_known:
+                ttl = self.default_ttl
+            else:
+                ttl = self.last_ttl
+        # Class
+        try:
+            rdclass = dns.rdataclass.from_text(token.value)
+            token = self.tok.get()
+            if not token.is_identifier():
+                raise dns.exception.SyntaxError
+        except dns.exception.SyntaxError:
+            raise dns.exception.SyntaxError
+        except Exception:
+            rdclass = self.zone.rdclass
+        if rdclass != self.zone.rdclass:
+            raise dns.exception.SyntaxError("RR class is not zone's class")
+        # Type
+        try:
+            rdtype = dns.rdatatype.from_text(token.value)
+        except:
+            raise dns.exception.SyntaxError(
+                "unknown rdatatype '%s'" % token.value)
+        n = self.zone.nodes.get(name)
+        if n is None:
+            n = self.zone.node_factory()
+            self.zone.nodes[name] = n
+        try:
+            rd = dns.rdata.from_text(rdclass, rdtype, self.tok,
+                                     self.current_origin, False)
+        except dns.exception.SyntaxError:
+            # Catch and reraise.
+            (ty, va) = sys.exc_info()[:2]
+            raise va
+        except:
+            # All exceptions that occur in the processing of rdata
+            # are treated as syntax errors. This is not strictly
+            # correct, but it is correct almost all of the time.
+            # We convert them to syntax errors so that we can emit
+            # helpful filename:line info.
+            (ty, va) = sys.exc_info()[:2]
+            raise dns.exception.SyntaxError(
+                "caught exception {}: {}".format(str(ty), str(va)))
+
+        if not self.default_ttl_known and isinstance(rd, dns.rdtypes.ANY.SOA.SOA):
+            # The pre-RFC2308 and pre-BIND9 behavior inherits the zone default
+            # TTL from the SOA minttl if no $TTL statement is present before the
+            # SOA is parsed.
+            self.default_ttl = rd.minimum
+            self.default_ttl_known = True
+
+        rd.choose_relativity(self.zone.origin, self.relativize)
+        covers = rd.covers()
+        rds = n.find_rdataset(rdclass, rdtype, covers, True)
+        rds.add(rd, ttl)
+
+    def _parse_modify(self, side):
+        # Here we catch everything in '{' '}' in a group so we can replace it
+        # with ''.
+        is_generate1 = re.compile(r"^.*\$({(\+|-?)(\d+),(\d+),(.)}).*$")
+        is_generate2 = re.compile(r"^.*\$({(\+|-?)(\d+)}).*$")
+        is_generate3 = re.compile(r"^.*\$({(\+|-?)(\d+),(\d+)}).*$")
+        # Sometimes there are modifiers in the hostname. These come after
+        # the dollar sign. They are in the form: ${offset[,width[,base]]}.
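+        # For example, "host-${0,3,d}" at iterator value 7 expands to
+        # "host-007": offset 0 is added to the iterator and the result is
+        # zero-filled to width 3; base 'd' (decimal) is the only base this
+        # implementation supports.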
+        # Make names
+        g1 = is_generate1.match(side)
+        if g1:
+            mod, sign, offset, width, base = g1.groups()
+            if sign == '':
+                sign = '+'
+        g2 = is_generate2.match(side)
+        if g2:
+            mod, sign, offset = g2.groups()
+            if sign == '':
+                sign = '+'
+            width = 0
+            base = 'd'
+        g3 = is_generate3.match(side)
+        if g3:
+            mod, sign, offset, width = g3.groups()
+            if sign == '':
+                sign = '+'
+            base = 'd'
+
+        if not (g1 or g2 or g3):
+            mod = ''
+            sign = '+'
+            offset = 0
+            width = 0
+            base = 'd'
+
+        if base != 'd':
+            raise NotImplementedError()
+
+        return mod, sign, offset, width, base
+
+    def _generate_line(self):
+        # range lhs [ttl] [class] type rhs [ comment ]
+        """Process one line containing the GENERATE statement from a DNS
+        master file."""
+        if self.current_origin is None:
+            raise UnknownOrigin
+
+        token = self.tok.get()
+        # Range (required)
+        try:
+            start, stop, step = dns.grange.from_text(token.value)
+            token = self.tok.get()
+            if not token.is_identifier():
+                raise dns.exception.SyntaxError
+        except:
+            raise dns.exception.SyntaxError
+
+        # lhs (required)
+        try:
+            lhs = token.value
+            token = self.tok.get()
+            if not token.is_identifier():
+                raise dns.exception.SyntaxError
+        except:
+            raise dns.exception.SyntaxError
+
+        # TTL
+        try:
+            ttl = dns.ttl.from_text(token.value)
+            self.last_ttl = ttl
+            self.last_ttl_known = True
+            token = self.tok.get()
+            if not token.is_identifier():
+                raise dns.exception.SyntaxError
+        except dns.ttl.BadTTL:
+            if not (self.last_ttl_known or self.default_ttl_known):
+                raise dns.exception.SyntaxError("Missing default TTL value")
+            if self.default_ttl_known:
+                ttl = self.default_ttl
+            else:
+                ttl = self.last_ttl
+        # Class
+        try:
+            rdclass = dns.rdataclass.from_text(token.value)
+            token = self.tok.get()
+            if not token.is_identifier():
+                raise dns.exception.SyntaxError
+        except dns.exception.SyntaxError:
+            raise dns.exception.SyntaxError
+        except Exception:
+            rdclass = self.zone.rdclass
+        if rdclass != self.zone.rdclass:
+            raise dns.exception.SyntaxError("RR class is not zone's class")
+        # Type
+        try:
+            rdtype = dns.rdatatype.from_text(token.value)
+            token = self.tok.get()
+            if not token.is_identifier():
+                raise dns.exception.SyntaxError
+        except Exception:
+            raise dns.exception.SyntaxError("unknown rdatatype '%s'" %
+                                            token.value)
+
+        # rhs (required)
+        try:
+            rhs = token.value
+        except:
+            raise dns.exception.SyntaxError
+
+        lmod, lsign, loffset, lwidth, lbase = self._parse_modify(lhs)
+        rmod, rsign, roffset, rwidth, rbase = self._parse_modify(rhs)
+        for i in range(start, stop + 1, step):
+            # +1 because bind is inclusive and python is exclusive
+
+            if lsign == u'+':
+                lindex = i + int(loffset)
+            elif lsign == u'-':
+                lindex = i - int(loffset)
+
+            if rsign == u'-':
+                rindex = i - int(roffset)
+            elif rsign == u'+':
+                rindex = i + int(roffset)
+
+            lzfindex = str(lindex).zfill(int(lwidth))
+            rzfindex = str(rindex).zfill(int(rwidth))
+
+            name = lhs.replace(u'$%s' % (lmod), lzfindex)
+            rdata = rhs.replace(u'$%s' % (rmod), rzfindex)
+
+            self.last_name = dns.name.from_text(name, self.current_origin)
+            name = self.last_name
+            if not name.is_subdomain(self.zone.origin):
+                self._eat_line()
+                return
+            if self.relativize:
+                name = name.relativize(self.zone.origin)
+
+            n = self.zone.nodes.get(name)
+            if n is None:
+                n = self.zone.node_factory()
+                self.zone.nodes[name] = n
+            try:
+                rd = dns.rdata.from_text(rdclass, rdtype, rdata,
+                                         self.current_origin, False)
+            except dns.exception.SyntaxError:
+                # Catch and reraise.
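+                # SyntaxErrors already carry a useful message; re-raise them
+                # unchanged so read() can prepend filename:line information.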
+ (ty, va) = sys.exc_info()[:2] + raise va + except: + # All exceptions that occur in the processing of rdata + # are treated as syntax errors. This is not strictly + # correct, but it is correct almost all of the time. + # We convert them to syntax errors so that we can emit + # helpful filename:line info. + (ty, va) = sys.exc_info()[:2] + raise dns.exception.SyntaxError("caught exception %s: %s" % + (str(ty), str(va))) + + rd.choose_relativity(self.zone.origin, self.relativize) + covers = rd.covers() + rds = n.find_rdataset(rdclass, rdtype, covers, True) + rds.add(rd, ttl) + + def read(self): + """Read a DNS master file and build a zone object. + + @raises dns.zone.NoSOA: No SOA RR was found at the zone origin + @raises dns.zone.NoNS: No NS RRset was found at the zone origin + """ + + try: + while 1: + token = self.tok.get(True, True) + if token.is_eof(): + if self.current_file is not None: + self.current_file.close() + if len(self.saved_state) > 0: + (self.tok, + self.current_origin, + self.last_name, + self.current_file, + self.last_ttl, + self.last_ttl_known, + self.default_ttl, + self.default_ttl_known) = self.saved_state.pop(-1) + continue + break + elif token.is_eol(): + continue + elif token.is_comment(): + self.tok.get_eol() + continue + elif token.value[0] == u'$': + c = token.value.upper() + if c == u'$TTL': + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError("bad $TTL") + self.default_ttl = dns.ttl.from_text(token.value) + self.default_ttl_known = True + self.tok.get_eol() + elif c == u'$ORIGIN': + self.current_origin = self.tok.get_name() + self.tok.get_eol() + if self.zone.origin is None: + self.zone.origin = self.current_origin + elif c == u'$INCLUDE' and self.allow_include: + token = self.tok.get() + filename = token.value + token = self.tok.get() + if token.is_identifier(): + new_origin =\ + dns.name.from_text(token.value, + self.current_origin) + self.tok.get_eol() + elif not token.is_eol_or_eof(): + raise dns.exception.SyntaxError( + "bad origin in $INCLUDE") + else: + new_origin = self.current_origin + self.saved_state.append((self.tok, + self.current_origin, + self.last_name, + self.current_file, + self.last_ttl, + self.last_ttl_known, + self.default_ttl, + self.default_ttl_known)) + self.current_file = open(filename, 'r') + self.tok = dns.tokenizer.Tokenizer(self.current_file, + filename) + self.current_origin = new_origin + elif c == u'$GENERATE': + self._generate_line() + else: + raise dns.exception.SyntaxError( + "Unknown master file directive '" + c + "'") + continue + self.tok.unget(token) + self._rr_line() + except dns.exception.SyntaxError as detail: + (filename, line_number) = self.tok.where() + if detail is None: + detail = "syntax error" + raise dns.exception.SyntaxError( + "%s:%d: %s" % (filename, line_number, detail)) + + # Now that we're done reading, do some basic checking of the zone. + if self.check_origin: + self.zone.check_origin() + + +def from_text(text, origin=None, rdclass=dns.rdataclass.IN, + relativize=True, zone_factory=Zone, filename=None, + allow_include=False, check_origin=True): + """Build a zone object from a master file format string. + + @param text: the master file format input + @type text: string. + @param origin: The origin of the zone; if not specified, the first + $ORIGIN statement in the master file will determine the origin of the + zone. + @type origin: dns.name.Name object or string + @param rdclass: The zone's rdata class; the default is class IN. 
+ @type rdclass: int + @param relativize: should names be relativized? The default is True + @type relativize: bool + @param zone_factory: The zone factory to use + @type zone_factory: function returning a Zone + @param filename: The filename to emit when describing where an error + occurred; the default is ''. + @type filename: string + @param allow_include: is $INCLUDE allowed? + @type allow_include: bool + @param check_origin: should sanity checks of the origin node be done? + The default is True. + @type check_origin: bool + @raises dns.zone.NoSOA: No SOA RR was found at the zone origin + @raises dns.zone.NoNS: No NS RRset was found at the zone origin + @rtype: dns.zone.Zone object + """ + + # 'text' can also be a file, but we don't publish that fact + # since it's an implementation detail. The official file + # interface is from_file(). + + if filename is None: + filename = '' + tok = dns.tokenizer.Tokenizer(text, filename) + reader = _MasterReader(tok, origin, rdclass, relativize, zone_factory, + allow_include=allow_include, + check_origin=check_origin) + reader.read() + return reader.zone + + +def from_file(f, origin=None, rdclass=dns.rdataclass.IN, + relativize=True, zone_factory=Zone, filename=None, + allow_include=True, check_origin=True): + """Read a master file and build a zone object. + + @param f: file or string. If I{f} is a string, it is treated + as the name of a file to open. + @param origin: The origin of the zone; if not specified, the first + $ORIGIN statement in the master file will determine the origin of the + zone. + @type origin: dns.name.Name object or string + @param rdclass: The zone's rdata class; the default is class IN. + @type rdclass: int + @param relativize: should names be relativized? The default is True + @type relativize: bool + @param zone_factory: The zone factory to use + @type zone_factory: function returning a Zone + @param filename: The filename to emit when describing where an error + occurred; the default is '', or the value of I{f} if I{f} is a + string. + @type filename: string + @param allow_include: is $INCLUDE allowed? + @type allow_include: bool + @param check_origin: should sanity checks of the origin node be done? + The default is True. + @type check_origin: bool + @raises dns.zone.NoSOA: No SOA RR was found at the zone origin + @raises dns.zone.NoNS: No NS RRset was found at the zone origin + @rtype: dns.zone.Zone object + """ + + str_type = string_types + if PY3: + opts = 'r' + else: + opts = 'rU' + + if isinstance(f, str_type): + if filename is None: + filename = f + f = open(f, opts) + want_close = True + else: + if filename is None: + filename = '' + want_close = False + + try: + z = from_text(f, origin, rdclass, relativize, zone_factory, + filename, allow_include, check_origin) + finally: + if want_close: + f.close() + return z + + +def from_xfr(xfr, zone_factory=Zone, relativize=True, check_origin=True): + """Convert the output of a zone transfer generator into a zone object. + + @param xfr: The xfr generator + @type xfr: generator of dns.message.Message objects + @param relativize: should names be relativized? The default is True. + It is essential that the relativize setting matches the one specified + to dns.query.xfr(). + @type relativize: bool + @param check_origin: should sanity checks of the origin node be done? + The default is True. 
+ @type check_origin: bool + @raises dns.zone.NoSOA: No SOA RR was found at the zone origin + @raises dns.zone.NoNS: No NS RRset was found at the zone origin + @rtype: dns.zone.Zone object + """ + + z = None + for r in xfr: + if z is None: + if relativize: + origin = r.origin + else: + origin = r.answer[0].name + rdclass = r.answer[0].rdclass + z = zone_factory(origin, rdclass, relativize=relativize) + for rrset in r.answer: + znode = z.nodes.get(rrset.name) + if not znode: + znode = z.node_factory() + z.nodes[rrset.name] = znode + zrds = znode.find_rdataset(rrset.rdclass, rrset.rdtype, + rrset.covers, True) + zrds.update_ttl(rrset.ttl) + for rd in rrset: + rd.choose_relativity(z.origin, relativize) + zrds.add(rd) + if check_origin: + z.check_origin() + return z diff --git a/openpype/version.py b/openpype/version.py index f85ea13ac8..dedf799055 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.0.0-beta" +__version__ = "3.0.0-beta2" diff --git a/poetry.lock b/poetry.lock index 6695a7bcca..41a1f636ec 100644 --- a/poetry.lock +++ b/poetry.lock @@ -80,7 +80,7 @@ python-dateutil = ">=2.7.0" [[package]] name = "astroid" -version = "2.5.1" +version = "2.5.3" description = "An abstract syntax tree for Python with inference support." category = "dev" optional = false @@ -123,14 +123,14 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (> [[package]] name = "autopep8" -version = "1.5.5" +version = "1.5.6" description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" category = "dev" optional = false python-versions = "*" [package.dependencies] -pycodestyle = ">=2.6.0" +pycodestyle = ">=2.7.0" toml = "*" [[package]] @@ -232,6 +232,14 @@ python-versions = "*" [package.extras] test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] +[[package]] +name = "coolname" +version = "1.1.0" +description = "Random name and slug generator" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "coverage" version = "5.5" @@ -245,7 +253,7 @@ toml = ["toml"] [[package]] name = "cryptography" -version = "3.4.6" +version = "3.4.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" optional = false @@ -264,15 +272,24 @@ test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pret [[package]] name = "cx-freeze" -version = "6.5.3" +version = "6.6" description = "Create standalone executables from Python scripts" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] +cx-Logging = {version = ">=3.0", markers = "sys_platform == \"win32\""} importlib-metadata = ">=3.1.1" +[[package]] +name = "cx-logging" +version = "3.0" +description = "Python and C interfaces for logging" +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "dnspython" version = "2.1.0" @@ -296,6 +313,18 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "enlighten" +version = "1.9.0" +description = "Enlighten Progress Bar" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +blessed = ">=1.17.7" +prefixed = ">=0.3.2" + [[package]] name = "evdev" version = "1.4.0" @@ -306,17 +335,17 @@ python-versions = "*" [[package]] name = "flake8" -version = "3.8.4" +version = "3.9.1" description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.6.0a1,<2.7.0" -pyflakes = ">=2.2.0,<2.3.0" +pycodestyle = ">=2.7.0,<2.8.0" +pyflakes = ">=2.3.0,<2.4.0" [[package]] name = "ftrack-python-api" @@ -346,7 +375,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "google-api-core" -version = "1.26.1" +version = "1.26.3" description = "Google API client core library" category = "main" optional = false @@ -384,7 +413,7 @@ uritemplate = ">=3.0.0,<4dev" [[package]] name = "google-auth" -version = "1.27.1" +version = "1.29.0" description = "Google Authentication Library" category = "main" optional = false @@ -399,6 +428,7 @@ six = ">=1.9.0" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)"] pyopenssl = ["pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] [[package]] name = "google-auth-httplib2" @@ -429,7 +459,7 @@ grpc = ["grpcio (>=1.0.0)"] [[package]] name = "httplib2" -version = "0.19.0" +version = "0.19.1" description = "A comprehensive HTTP client library." 
category = "main" optional = false @@ -456,7 +486,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "3.7.2" +version = "4.0.0" description = "Read metadata from Python packages" category = "main" optional = false @@ -468,7 +498,7 @@ zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] name = "iniconfig" @@ -480,7 +510,7 @@ python-versions = "*" [[package]] name = "isort" -version = "5.7.0" +version = "5.8.0" description = "A Python utility / library to sort Python imports." category = "dev" optional = false @@ -532,7 +562,7 @@ i18n = ["Babel (>=0.8)"] [[package]] name = "jinxed" -version = "1.0.1" +version = "1.1.0" description = "Jinxed Terminal Library" category = "main" optional = false @@ -579,11 +609,11 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pyt [[package]] name = "lazy-object-proxy" -version = "1.5.2" +version = "1.6.0" description = "A fast and thorough lazy object proxy." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] name = "log4mongo" @@ -653,7 +683,7 @@ pyparsing = ">=2.0.2" [[package]] name = "parso" -version = "0.8.1" +version = "0.8.2" description = "A Python Parser" category = "dev" optional = false @@ -676,7 +706,7 @@ six = "*" [[package]] name = "pillow" -version = "8.1.2" +version = "8.2.0" description = "Python Imaging Library (Fork)" category = "main" optional = false @@ -696,9 +726,17 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] dev = ["pre-commit", "tox"] +[[package]] +name = "prefixed" +version = "0.3.2" +description = "Prefixed alternative numeric library" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "protobuf" -version = "3.15.6" +version = "3.15.8" description = "Protocol Buffers" category = "main" optional = false @@ -752,7 +790,7 @@ python-versions = "*" [[package]] name = "pycodestyle" -version = "2.6.0" +version = "2.7.0" description = "Python style guide checker" category = "dev" optional = false @@ -780,7 +818,7 @@ snowballstemmer = "*" [[package]] name = "pyflakes" -version = "2.2.0" +version = "2.3.1" description = "passive checker of Python programs" category = "dev" optional = false @@ -796,14 +834,14 @@ python-versions = ">=3.5" [[package]] name = "pylint" -version = "2.7.2" +version = "2.7.4" description = "python code static checker" category = "dev" optional = false python-versions = "~=3.6" [package.dependencies] -astroid = ">=2.5.1,<2.6" +astroid = ">=2.5.2,<2.7" colorama = {version = "*", markers = "sys_platform == \"win32\""} isort = ">=4.2.5,<6" mccabe = ">=0.6,<0.7" @@ -921,7 +959,7 @@ python-versions = ">=3.5" [[package]] name = "pytest" -version = "6.2.2" +version = "6.2.3" description = "pytest: simple powerful testing with Python" category = "dev" optional = false @@ 
-1112,7 +1150,7 @@ python-versions = "*" [[package]] name = "sphinx" -version = "3.5.2" +version = "3.5.4" description = "Python documentation generator" category = "dev" optional = false @@ -1122,7 +1160,7 @@ python-versions = ">=3.5" alabaster = ">=0.7,<0.8" babel = ">=1.3" colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.12" +docutils = ">=0.12,<0.17" imagesize = "*" Jinja2 = ">=2.3" packaging = "*" @@ -1155,13 +1193,14 @@ sphinx = "*" [[package]] name = "sphinx-rtd-theme" -version = "0.5.1" +version = "0.5.2" description = "Read the Docs theme for Sphinx" category = "dev" optional = false python-versions = "*" [package.dependencies] +docutils = "<0.17" sphinx = "*" [package.extras] @@ -1269,22 +1308,9 @@ category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -[[package]] -name = "tqdm" -version = "4.59.0" -description = "Fast, Extensible Progress Meter" -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" - -[package.extras] -dev = ["py-make (>=0.1.0)", "twine", "wheel"] -notebook = ["ipywidgets (>=6)"] -telegram = ["requests"] - [[package]] name = "typed-ast" -version = "1.4.2" +version = "1.4.3" description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" optional = false @@ -1308,16 +1334,16 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "urllib3" -version = "1.26.3" +version = "1.26.4" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] -brotli = ["brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotlipy (>=0.6.0)"] [[package]] name = "wcwidth" @@ -1348,7 +1374,7 @@ python-versions = "*" [[package]] name = "wsrpc-aiohttp" -version = "3.1.1" +version = "3.1.2" description = "WSRPC is the RPC over WebSocket for aiohttp" category = "main" optional = false @@ -1391,7 +1417,7 @@ testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pyt [metadata] lock-version = "1.1" python-versions = "3.7.*" -content-hash = "4905515073ad2bf2a8517d513d68e48669b6a829f24e540b2dd60bc70cbea26b" +content-hash = "80fde42aade7fc90bb68d85f0d9b3feb27fc3744d72eb5af6a11b6c9d9836aca" [metadata.files] acre = [] @@ -1455,8 +1481,8 @@ arrow = [ {file = "arrow-0.17.0.tar.gz", hash = "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4"}, ] astroid = [ - {file = "astroid-2.5.1-py3-none-any.whl", hash = "sha256:21d735aab248253531bb0f1e1e6d068f0ee23533e18ae8a6171ff892b98297cf"}, - {file = "astroid-2.5.1.tar.gz", hash = "sha256:cfc35498ee64017be059ceffab0a25bedf7548ab76f2bea691c5565896e7128d"}, + {file = "astroid-2.5.3-py3-none-any.whl", hash = "sha256:bea3f32799fbb8581f58431c12591bc20ce11cbc90ad82e2ea5717d94f2080d5"}, + {file = "astroid-2.5.3.tar.gz", hash = "sha256:ad63b8552c70939568966811a088ef0bc880f99a24a00834abd0e3681b514f91"}, ] async-timeout = [ {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"}, @@ -1471,8 +1497,8 @@ attrs = [ {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, ] autopep8 = [ - {file = "autopep8-1.5.5-py2.py3-none-any.whl", hash = 
"sha256:9e136c472c475f4ee4978b51a88a494bfcd4e3ed17950a44a988d9e434837bea"}, - {file = "autopep8-1.5.5.tar.gz", hash = "sha256:cae4bc0fb616408191af41d062d7ec7ef8679c7f27b068875ca3a9e2878d5443"}, + {file = "autopep8-1.5.6-py2.py3-none-any.whl", hash = "sha256:f01b06a6808bc31698db907761e5890eb2295e287af53f6693b39ce55454034a"}, + {file = "autopep8-1.5.6.tar.gz", hash = "sha256:5454e6e9a3d02aae38f866eec0d9a7de4ab9f93c10a273fb0340f3d6d09f7514"}, ] babel = [ {file = "Babel-2.9.0-py2.py3-none-any.whl", hash = "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5"}, @@ -1549,6 +1575,10 @@ commonmark = [ {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, ] +coolname = [ + {file = "coolname-1.1.0-py2.py3-none-any.whl", hash = "sha256:e6a83a0ac88640f4f3d2070438dbe112fe80cfebc119c93bd402976ec84c0978"}, + {file = "coolname-1.1.0.tar.gz", hash = "sha256:410fe6ea9999bf96f2856ef0c726d5f38782bbefb7bb1aca0e91e0dc98ed09e3"}, +] coverage = [ {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, @@ -1604,29 +1634,44 @@ coverage = [ {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, ] cryptography = [ - {file = "cryptography-3.4.6-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:57ad77d32917bc55299b16d3b996ffa42a1c73c6cfa829b14043c561288d2799"}, - {file = "cryptography-3.4.6-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:4169a27b818de4a1860720108b55a2801f32b6ae79e7f99c00d79f2a2822eeb7"}, - {file = "cryptography-3.4.6-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:93cfe5b7ff006de13e1e89830810ecbd014791b042cbe5eec253be11ac2b28f3"}, - {file = "cryptography-3.4.6-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:5ecf2bcb34d17415e89b546dbb44e73080f747e504273e4d4987630493cded1b"}, - {file = "cryptography-3.4.6-cp36-abi3-manylinux2014_x86_64.whl", hash = "sha256:fec7fb46b10da10d9e1d078d1ff8ed9e05ae14f431fdbd11145edd0550b9a964"}, - {file = "cryptography-3.4.6-cp36-abi3-win32.whl", hash = "sha256:df186fcbf86dc1ce56305becb8434e4b6b7504bc724b71ad7a3239e0c9d14ef2"}, - {file = "cryptography-3.4.6-cp36-abi3-win_amd64.whl", hash = "sha256:66b57a9ca4b3221d51b237094b0303843b914b7d5afd4349970bb26518e350b0"}, - {file = "cryptography-3.4.6-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:066bc53f052dfeda2f2d7c195cf16fb3e5ff13e1b6b7415b468514b40b381a5b"}, - {file = "cryptography-3.4.6-pp36-pypy36_pp73-manylinux2014_x86_64.whl", hash = "sha256:600cf9bfe75e96d965509a4c0b2b183f74a4fa6f5331dcb40fb7b77b7c2484df"}, - {file = "cryptography-3.4.6-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:0923ba600d00718d63a3976f23cab19aef10c1765038945628cd9be047ad0336"}, - {file = "cryptography-3.4.6-pp37-pypy37_pp73-manylinux2014_x86_64.whl", hash = "sha256:9e98b452132963678e3ac6c73f7010fe53adf72209a32854d55690acac3f6724"}, - {file = "cryptography-3.4.6.tar.gz", hash = "sha256:2d32223e5b0ee02943f32b19245b61a62db83a882f0e76cc564e1cec60d48f87"}, + {file = "cryptography-3.4.7-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1"}, + {file = 
"cryptography-3.4.7-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250"}, + {file = "cryptography-3.4.7-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2"}, + {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6"}, + {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_x86_64.whl", hash = "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959"}, + {file = "cryptography-3.4.7-cp36-abi3-win32.whl", hash = "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d"}, + {file = "cryptography-3.4.7-cp36-abi3-win_amd64.whl", hash = "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca"}, + {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873"}, + {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2014_x86_64.whl", hash = "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d"}, + {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177"}, + {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2014_x86_64.whl", hash = "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"}, + {file = "cryptography-3.4.7.tar.gz", hash = "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713"}, ] cx-freeze = [ - {file = "cx_Freeze-6.5.3-cp36-cp36m-win32.whl", hash = "sha256:0a1babae574546b622303da53e1a9829aa3a7e53e62b41eb260250220f83164b"}, - {file = "cx_Freeze-6.5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:2671e46cd491c181c632df3f0df2847bad7066897faa07eb1d50f60f5082596f"}, - {file = "cx_Freeze-6.5.3-cp37-cp37m-win32.whl", hash = "sha256:abf5f95f914573cdff5bd9845144977b875fc655417d0e66f935865af1de64d5"}, - {file = "cx_Freeze-6.5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:65c4560bc7b18e2a7bbe3546313cbc01d3fca244d199b39508cfa2ae561887ce"}, - {file = "cx_Freeze-6.5.3-cp38-cp38-win32.whl", hash = "sha256:7e2592fe1b65bd45c729934b391579fde5aed6b4c9e3e4d990738fc7fec718ea"}, - {file = "cx_Freeze-6.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:d3bb71349dace28e545eb1e4549255f0dd915f925f8505b1a342b3d2fbd4734b"}, - {file = "cx_Freeze-6.5.3-cp39-cp39-win32.whl", hash = "sha256:df3872d8e8f87a3f89e6758bed130b5b95ee7473054e2a7eee5b1a8d1c4ecf9e"}, - {file = "cx_Freeze-6.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:507bbaace2fd27edb0e6b024898ab2e4831d45d7238264f578a5e4fa70f065e5"}, - {file = "cx_Freeze-6.5.3.tar.gz", hash = "sha256:e0d03cabcdf9b9c21354807ed9f06fa9481a8fd5a0838968a830f01a70820ff1"}, + {file = "cx_Freeze-6.6-cp36-cp36m-win32.whl", hash = "sha256:b3d3a6bcd1a07c50b4e1c907f14842642156110e63a99cd5c73b8a24751e9b97"}, + {file = "cx_Freeze-6.6-cp36-cp36m-win_amd64.whl", hash = "sha256:1935266ec644ea4f7e584985f44cefc0622a449a09980d990833a1a2afcadac8"}, + {file = "cx_Freeze-6.6-cp37-cp37m-win32.whl", hash = "sha256:1eac2b0f254319cc641ce25bd83337effd7936092562fde701f3ffb40e0274ec"}, + {file = "cx_Freeze-6.6-cp37-cp37m-win_amd64.whl", hash = "sha256:2bc46ef6d510811b6002f34a3ae4cbfdea44e18644febd2a404d3ee8e48a9fc4"}, + {file = "cx_Freeze-6.6-cp38-cp38-win32.whl", hash = "sha256:46eb50ebc46f7ae236d16c6a52671ab0f7bb479bea668da19f4b6de3cc413e9e"}, + {file = "cx_Freeze-6.6-cp38-cp38-win_amd64.whl", hash = 
"sha256:8c3b00476ce385bb58595bffce55aed031e5a6e16ab6e14d8bee9d1d569e46c3"}, + {file = "cx_Freeze-6.6-cp39-cp39-win32.whl", hash = "sha256:6e9340cbcf52d4836980ecc83ddba4f7704ff6654dd41168c146b74f512977ce"}, + {file = "cx_Freeze-6.6-cp39-cp39-win_amd64.whl", hash = "sha256:2fcf1c8b77ae5c06f45be3a9aff79e1dd808c0d624e97561f840dec5ea9b214a"}, + {file = "cx_Freeze-6.6.tar.gz", hash = "sha256:c4af8ad3f7e7d71e291c1dec5d0fb26bbe92df834b098ed35434c901fbd6762f"}, +] +cx-logging = [ + {file = "cx_Logging-3.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:9fcd297e5c51470521c47eff0f86ba844aeca6be97e13c3e2114ebdf03fa3c96"}, + {file = "cx_Logging-3.0-cp36-cp36m-win32.whl", hash = "sha256:0df4be47c5022cc54316949e283403214568ef599817ced0c0972183d6d4fabb"}, + {file = "cx_Logging-3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:203ca92ee7c15d5dfe1fcdfcef7b39d0123eba5c6d8c2388b6e7db6b961a5362"}, + {file = "cx_Logging-3.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:20daa71b2a30f61d09bcf55dbda002c10f0c7c691f53cb393fc6485410fa2484"}, + {file = "cx_Logging-3.0-cp37-cp37m-win32.whl", hash = "sha256:5be5f905e8d34a3326e28d428674cdc2d57912fdf6e25b8676d63f76294eb4e0"}, + {file = "cx_Logging-3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:04e4b61e2636dc8ae135937655af6626362aefc7f6175e86888a244b61001823"}, + {file = "cx_Logging-3.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:1bf0ebc79a7baa331c7deaf57088c234b82710286dfad453ff0c55eee0122b72"}, + {file = "cx_Logging-3.0-cp38-cp38-win32.whl", hash = "sha256:d98a59a47e99fa430b3f6d2a979e27509852d2c43e204f43bd0168e7ec97f469"}, + {file = "cx_Logging-3.0-cp38-cp38-win_amd64.whl", hash = "sha256:bb2e91019e5905415f795eef994de60ace5ae186fc4fe3d358e2d8feebb24992"}, + {file = "cx_Logging-3.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b6f4a9b750e02a180517f779d174a1c7db651981cd37e5623235b87da9774dfd"}, + {file = "cx_Logging-3.0-cp39-cp39-win32.whl", hash = "sha256:e7cca28e8ee4082654b6062cc4d06f83d48f1a7e2d152bab020c9e3e373afb90"}, + {file = "cx_Logging-3.0-cp39-cp39-win_amd64.whl", hash = "sha256:302e9c4f65a936c288a4fa59a90e7e142d9ef994aa29676731acafdcccdbb3f5"}, + {file = "cx_Logging-3.0.tar.gz", hash = "sha256:ba8a7465facf7b98d8f494030fb481a2e8aeee29dc191e10383bb54ed42bdb34"}, ] dnspython = [ {file = "dnspython-2.1.0-py3-none-any.whl", hash = "sha256:95d12f6ef0317118d2a1a6fc49aac65ffec7eb8087474158f42f26a639135216"}, @@ -1636,12 +1681,16 @@ docutils = [ {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"}, {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"}, ] +enlighten = [ + {file = "enlighten-1.9.0-py2.py3-none-any.whl", hash = "sha256:5c59e41505702243c6b26437403e371d2a146ac72de5f706376f738ea8f32659"}, + {file = "enlighten-1.9.0.tar.gz", hash = "sha256:539cc308ccc0c3bfb50feb1b2da94c1a1ac21e80fe95e984221de8966d48f428"}, +] evdev = [ {file = "evdev-1.4.0.tar.gz", hash = "sha256:8782740eb1a86b187334c07feb5127d3faa0b236e113206dfe3ae8f77fb1aaf1"}, ] flake8 = [ - {file = "flake8-3.8.4-py2.py3-none-any.whl", hash = "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839"}, - {file = "flake8-3.8.4.tar.gz", hash = "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"}, + {file = "flake8-3.9.1-py2.py3-none-any.whl", hash = "sha256:3b9f848952dddccf635be78098ca75010f073bfe14d2c6bda867154bea728d2a"}, + {file = "flake8-3.9.1.tar.gz", hash = 
"sha256:1aa8990be1e689d96c745c5682b687ea49f2e05a443aff1f8251092b0014e378"}, ] ftrack-python-api = [ {file = "ftrack-python-api-2.0.0.tar.gz", hash = "sha256:dd6f02c31daf5a10078196dc9eac4671e4297c762fbbf4df98de668ac12281d9"}, @@ -1651,16 +1700,16 @@ future = [ {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, ] google-api-core = [ - {file = "google-api-core-1.26.1.tar.gz", hash = "sha256:23b0df512c4cc8729793f8992edb350e3211f5fd0ec007afb1599864b421beef"}, - {file = "google_api_core-1.26.1-py2.py3-none-any.whl", hash = "sha256:c383206f0f87545d3e658c4f8dc3b18a8457610fdbd791a15757c5b42d1e0e7f"}, + {file = "google-api-core-1.26.3.tar.gz", hash = "sha256:b914345c7ea23861162693a27703bab804a55504f7e6e9abcaff174d80df32ac"}, + {file = "google_api_core-1.26.3-py2.py3-none-any.whl", hash = "sha256:099762d4b4018cd536bcf85136bf337957da438807572db52f21dc61251be089"}, ] google-api-python-client = [ {file = "google-api-python-client-1.12.8.tar.gz", hash = "sha256:f3b9684442eec2cfe9f9bb48e796ef919456b82142c7528c5fd527e5224f08bb"}, {file = "google_api_python_client-1.12.8-py2.py3-none-any.whl", hash = "sha256:3c4c4ca46b5c21196bec7ee93453443e477d82cbfa79234d1ce0645f81170eaf"}, ] google-auth = [ - {file = "google-auth-1.27.1.tar.gz", hash = "sha256:d8958af6968e4ecd599f82357ebcfeb126f826ed0656126ad68416f810f7531e"}, - {file = "google_auth-1.27.1-py2.py3-none-any.whl", hash = "sha256:63a5636d7eacfe6ef5b7e36e112b3149fa1c5b5ad77dd6df54910459bcd6b89f"}, + {file = "google-auth-1.29.0.tar.gz", hash = "sha256:010f011c4e27d3d5eb01106fba6aac39d164842dfcd8709955c4638f5b11ccf8"}, + {file = "google_auth-1.29.0-py2.py3-none-any.whl", hash = "sha256:f30a672a64d91cc2e3137765d088c5deec26416246f7a9e956eaf69a8d7ed49c"}, ] google-auth-httplib2 = [ {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, @@ -1671,8 +1720,8 @@ googleapis-common-protos = [ {file = "googleapis_common_protos-1.53.0-py2.py3-none-any.whl", hash = "sha256:f6d561ab8fb16b30020b940e2dd01cd80082f4762fa9f3ee670f4419b4b8dbd0"}, ] httplib2 = [ - {file = "httplib2-0.19.0-py3-none-any.whl", hash = "sha256:749c32603f9bf16c1277f59531d502e8f1c2ca19901ae653b49c4ed698f0820e"}, - {file = "httplib2-0.19.0.tar.gz", hash = "sha256:e0d428dad43c72dbce7d163b7753ffc7a39c097e6788ef10f4198db69b92f08e"}, + {file = "httplib2-0.19.1-py3-none-any.whl", hash = "sha256:2ad195faf9faf079723f6714926e9a9061f694d07724b846658ce08d40f522b4"}, + {file = "httplib2-0.19.1.tar.gz", hash = "sha256:0b12617eeca7433d4c396a100eaecfa4b08ee99aa881e6df6e257a7aad5d533d"}, ] idna = [ {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, @@ -1683,16 +1732,16 @@ imagesize = [ {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, ] importlib-metadata = [ - {file = "importlib_metadata-3.7.2-py3-none-any.whl", hash = "sha256:407d13f55dc6f2a844e62325d18ad7019a436c4bfcaee34cda35f2be6e7c3e34"}, - {file = "importlib_metadata-3.7.2.tar.gz", hash = "sha256:18d5ff601069f98d5d605b6a4b50c18a34811d655c55548adc833e687289acde"}, + {file = "importlib_metadata-4.0.0-py3-none-any.whl", hash = "sha256:19192b88d959336bfa6bdaaaef99aeafec179eca19c47c804e555703ee5f07ef"}, + {file = "importlib_metadata-4.0.0.tar.gz", hash = "sha256:2e881981c9748d7282b374b68e759c87745c25427b67ecf0cc67fb6637a1bff9"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash 
= "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] isort = [ - {file = "isort-5.7.0-py3-none-any.whl", hash = "sha256:fff4f0c04e1825522ce6949973e83110a6e907750cd92d128b0d14aaaadbffdc"}, - {file = "isort-5.7.0.tar.gz", hash = "sha256:c729845434366216d320e936b8ad6f9d681aab72dc7cbc2d51bedc3582f3ad1e"}, + {file = "isort-5.8.0-py3-none-any.whl", hash = "sha256:2bb1680aad211e3c9944dbce1d4ba09a989f04e238296c87fe2139faa26d655d"}, + {file = "isort-5.8.0.tar.gz", hash = "sha256:0a943902919f65c5684ac4e0154b1ad4fac6dcaa5d9f3426b732f1c8b5419be6"}, ] jedi = [ {file = "jedi-0.13.3-py2.py3-none-any.whl", hash = "sha256:2c6bcd9545c7d6440951b12b44d373479bf18123a401a52025cf98563fbd826c"}, @@ -1707,8 +1756,8 @@ jinja2 = [ {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, ] jinxed = [ - {file = "jinxed-1.0.1-py2.py3-none-any.whl", hash = "sha256:602f2cb3523c1045456f7b6d79ac19297fd8e933ae3bd9159845dc857f2d519c"}, - {file = "jinxed-1.0.1.tar.gz", hash = "sha256:bc523c74fe676c99ccc69c68c2dcd7d4d2d7b2541f6dbef74ef211aedd8ad0d3"}, + {file = "jinxed-1.1.0-py2.py3-none-any.whl", hash = "sha256:6a61ccf963c16aa885304f27e6e5693783676897cea0c7f223270c8b8e78baf8"}, + {file = "jinxed-1.1.0.tar.gz", hash = "sha256:d8f1731f134e9e6b04d95095845ae6c10eb15cb223a5f0cabdea87d4a279c305"}, ] jsonschema = [ {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, @@ -1719,30 +1768,28 @@ keyring = [ {file = "keyring-22.4.0.tar.gz", hash = "sha256:d981e02d134cc3d636a716fbc3ca967bc9609bae5dc21b0063e4409355993ddf"}, ] lazy-object-proxy = [ - {file = "lazy-object-proxy-1.5.2.tar.gz", hash = "sha256:5944a9b95e97de1980c65f03b79b356f30a43de48682b8bdd90aa5089f0ec1f4"}, - {file = "lazy_object_proxy-1.5.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:e960e8be509e8d6d618300a6c189555c24efde63e85acaf0b14b2cd1ac743315"}, - {file = "lazy_object_proxy-1.5.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:522b7c94b524389f4a4094c4bf04c2b02228454ddd17c1a9b2801fac1d754871"}, - {file = "lazy_object_proxy-1.5.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:3782931963dc89e0e9a0ae4348b44762e868ea280e4f8c233b537852a8996ab9"}, - {file = "lazy_object_proxy-1.5.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:429c4d1862f3fc37cd56304d880f2eae5bd0da83bdef889f3bd66458aac49128"}, - {file = "lazy_object_proxy-1.5.2-cp35-cp35m-win32.whl", hash = "sha256:cd1bdace1a8762534e9a36c073cd54e97d517a17d69a17985961265be6d22847"}, - {file = "lazy_object_proxy-1.5.2-cp35-cp35m-win_amd64.whl", hash = "sha256:ddbdcd10eb999d7ab292677f588b658372aadb9a52790f82484a37127a390108"}, - {file = "lazy_object_proxy-1.5.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ecb5dd5990cec6e7f5c9c1124a37cb2c710c6d69b0c1a5c4aa4b35eba0ada068"}, - {file = "lazy_object_proxy-1.5.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:b6577f15d5516d7d209c1a8cde23062c0f10625f19e8dc9fb59268859778d7d7"}, - {file = "lazy_object_proxy-1.5.2-cp36-cp36m-win32.whl", hash = "sha256:c8fe2d6ff0ff583784039d0255ea7da076efd08507f2be6f68583b0da32e3afb"}, - {file = "lazy_object_proxy-1.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:fa5b2dee0e231fa4ad117be114251bdfe6afe39213bd629d43deb117b6a6c40a"}, - {file = "lazy_object_proxy-1.5.2-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:1d33d6f789697f401b75ce08e73b1de567b947740f768376631079290118ad39"}, - {file = "lazy_object_proxy-1.5.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:57fb5c5504ddd45ed420b5b6461a78f58cbb0c1b0cbd9cd5a43ad30a4a3ee4d0"}, - {file = "lazy_object_proxy-1.5.2-cp37-cp37m-win32.whl", hash = "sha256:e7273c64bccfd9310e9601b8f4511d84730239516bada26a0c9846c9697617ef"}, - {file = "lazy_object_proxy-1.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6f4e5e68b7af950ed7fdb594b3f19a0014a3ace0fedb86acb896e140ffb24302"}, - {file = "lazy_object_proxy-1.5.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cadfa2c2cf54d35d13dc8d231253b7985b97d629ab9ca6e7d672c35539d38163"}, - {file = "lazy_object_proxy-1.5.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e7428977763150b4cf83255625a80a23dfdc94d43be7791ce90799d446b4e26f"}, - {file = "lazy_object_proxy-1.5.2-cp38-cp38-win32.whl", hash = "sha256:2f2de8f8ac0be3e40d17730e0600619d35c78c13a099ea91ef7fb4ad944ce694"}, - {file = "lazy_object_proxy-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:38c3865bd220bd983fcaa9aa11462619e84a71233bafd9c880f7b1cb753ca7fa"}, - {file = "lazy_object_proxy-1.5.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:8a44e9901c0555f95ac401377032f6e6af66d8fc1fbfad77a7a8b1a826e0b93c"}, - {file = "lazy_object_proxy-1.5.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:fa7fb7973c622b9e725bee1db569d2c2ee64d2f9a089201c5e8185d482c7352d"}, - {file = "lazy_object_proxy-1.5.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:71a1ef23f22fa8437974b2d60fedb947c99a957ad625f83f43fd3de70f77f458"}, - {file = "lazy_object_proxy-1.5.2-cp39-cp39-win32.whl", hash = "sha256:ef3f5e288aa57b73b034ce9c1f1ac753d968f9069cd0742d1d69c698a0167166"}, - {file = "lazy_object_proxy-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:37d9c34b96cca6787fe014aeb651217944a967a5b165e2cacb6b858d2997ab84"}, + {file = "lazy-object-proxy-1.6.0.tar.gz", hash = "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"}, + {file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"}, + {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win32.whl", hash = "sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e"}, + {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93"}, + {file = "lazy_object_proxy-1.6.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741"}, + {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587"}, + {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4"}, + {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win32.whl", hash = "sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f"}, + {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3"}, + {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981"}, + {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2"}, + {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win32.whl", hash = 
"sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd"}, + {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837"}, + {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653"}, + {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3"}, + {file = "lazy_object_proxy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8"}, + {file = "lazy_object_proxy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf"}, + {file = "lazy_object_proxy-1.6.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad"}, + {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43"}, + {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a"}, + {file = "lazy_object_proxy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61"}, + {file = "lazy_object_proxy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b"}, ] log4mongo = [ {file = "log4mongo-1.7.0.tar.gz", hash = "sha256:dc374617206162a0b14167fbb5feac01dbef587539a235dadba6200362984a68"}, @@ -1850,73 +1897,77 @@ packaging = [ {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, ] parso = [ - {file = "parso-0.8.1-py2.py3-none-any.whl", hash = "sha256:15b00182f472319383252c18d5913b69269590616c947747bc50bf4ac768f410"}, - {file = "parso-0.8.1.tar.gz", hash = "sha256:8519430ad07087d4c997fda3a7918f7cfa27cb58972a8c89c2a0295a1c940e9e"}, + {file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"}, + {file = "parso-0.8.2.tar.gz", hash = "sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398"}, ] pathlib2 = [ {file = "pathlib2-2.3.5-py2.py3-none-any.whl", hash = "sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db"}, {file = "pathlib2-2.3.5.tar.gz", hash = "sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868"}, ] pillow = [ - {file = "Pillow-8.1.2-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:5cf03b9534aca63b192856aa601c68d0764810857786ea5da652581f3a44c2b0"}, - {file = "Pillow-8.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:f91b50ad88048d795c0ad004abbe1390aa1882073b1dca10bfd55d0b8cf18ec5"}, - {file = "Pillow-8.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5762ebb4436f46b566fc6351d67a9b5386b5e5de4e58fdaa18a1c83e0e20f1a8"}, - {file = "Pillow-8.1.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e2cd8ac157c1e5ae88b6dd790648ee5d2777e76f1e5c7d184eaddb2938594f34"}, - {file = "Pillow-8.1.2-cp36-cp36m-win32.whl", hash = "sha256:72027ebf682abc9bafd93b43edc44279f641e8996fb2945104471419113cfc71"}, - {file = "Pillow-8.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d1d6bca39bb6dd94fba23cdb3eeaea5e30c7717c5343004d900e2a63b132c341"}, - {file = "Pillow-8.1.2-cp37-cp37m-macosx_10_10_x86_64.whl", hash = 
"sha256:90882c6f084ef68b71bba190209a734bf90abb82ab5e8f64444c71d5974008c6"}, - {file = "Pillow-8.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:89e4c757a91b8c55d97c91fa09c69b3677c227b942fa749e9a66eef602f59c28"}, - {file = "Pillow-8.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8c4e32218c764bc27fe49b7328195579581aa419920edcc321c4cb877c65258d"}, - {file = "Pillow-8.1.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a01da2c266d9868c4f91a9c6faf47a251f23b9a862dce81d2ff583135206f5be"}, - {file = "Pillow-8.1.2-cp37-cp37m-win32.whl", hash = "sha256:30d33a1a6400132e6f521640dd3f64578ac9bfb79a619416d7e8802b4ce1dd55"}, - {file = "Pillow-8.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:71b01ee69e7df527439d7752a2ce8fb89e19a32df484a308eca3e81f673d3a03"}, - {file = "Pillow-8.1.2-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:5a2d957eb4aba9d48170b8fe6538ec1fbc2119ffe6373782c03d8acad3323f2e"}, - {file = "Pillow-8.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:87f42c976f91ca2fc21a3293e25bd3cd895918597db1b95b93cbd949f7d019ce"}, - {file = "Pillow-8.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:15306d71a1e96d7e271fd2a0737038b5a92ca2978d2e38b6ced7966583e3d5af"}, - {file = "Pillow-8.1.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:71f31ee4df3d5e0b366dd362007740106d3210fb6a56ec4b581a5324ba254f06"}, - {file = "Pillow-8.1.2-cp38-cp38-win32.whl", hash = "sha256:98afcac3205d31ab6a10c5006b0cf040d0026a68ec051edd3517b776c1d78b09"}, - {file = "Pillow-8.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:328240f7dddf77783e72d5ed79899a6b48bc6681f8d1f6001f55933cb4905060"}, - {file = "Pillow-8.1.2-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:bead24c0ae3f1f6afcb915a057943ccf65fc755d11a1410a909c1fefb6c06ad1"}, - {file = "Pillow-8.1.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81b3716cc9744ffdf76b39afb6247eae754186838cedad0b0ac63b2571253fe6"}, - {file = "Pillow-8.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:63cd413ac52ee3f67057223d363f4f82ce966e64906aea046daf46695e3c8238"}, - {file = "Pillow-8.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:8565355a29655b28fdc2c666fd9a3890fe5edc6639d128814fafecfae2d70910"}, - {file = "Pillow-8.1.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1940fc4d361f9cc7e558d6f56ff38d7351b53052fd7911f4b60cd7bc091ea3b1"}, - {file = "Pillow-8.1.2-cp39-cp39-win32.whl", hash = "sha256:46c2bcf8e1e75d154e78417b3e3c64e96def738c2a25435e74909e127a8cba5e"}, - {file = "Pillow-8.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:aeab4cd016e11e7aa5cfc49dcff8e51561fa64818a0be86efa82c7038e9369d0"}, - {file = "Pillow-8.1.2-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:74cd9aa648ed6dd25e572453eb09b08817a1e3d9f8d1bd4d8403d99e42ea790b"}, - {file = "Pillow-8.1.2-pp36-pypy36_pp73-manylinux2010_i686.whl", hash = "sha256:e5739ae63636a52b706a0facec77b2b58e485637e1638202556156e424a02dc2"}, - {file = "Pillow-8.1.2-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:903293320efe2466c1ab3509a33d6b866dc850cfd0c5d9cc92632014cec185fb"}, - {file = "Pillow-8.1.2-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:5daba2b40782c1c5157a788ec4454067c6616f5a0c1b70e26ac326a880c2d328"}, - {file = "Pillow-8.1.2-pp37-pypy37_pp73-manylinux2010_i686.whl", hash = "sha256:1f93f2fe211f1ef75e6f589327f4d4f8545d5c8e826231b042b483d8383e8a7c"}, - {file = "Pillow-8.1.2-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:6efac40344d8f668b6c4533ae02a48d52fd852ef0654cc6f19f6ac146399c733"}, - {file = "Pillow-8.1.2-pp37-pypy37_pp73-win32.whl", hash = 
"sha256:f36c3ff63d6fc509ce599a2f5b0d0732189eed653420e7294c039d342c6e204a"}, - {file = "Pillow-8.1.2.tar.gz", hash = "sha256:b07c660e014852d98a00a91adfbe25033898a9d90a8f39beb2437d22a203fc44"}, + {file = "Pillow-8.2.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:dc38f57d8f20f06dd7c3161c59ca2c86893632623f33a42d592f097b00f720a9"}, + {file = "Pillow-8.2.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a013cbe25d20c2e0c4e85a9daf438f85121a4d0344ddc76e33fd7e3965d9af4b"}, + {file = "Pillow-8.2.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8bb1e155a74e1bfbacd84555ea62fa21c58e0b4e7e6b20e4447b8d07990ac78b"}, + {file = "Pillow-8.2.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c5236606e8570542ed424849f7852a0ff0bce2c4c8d0ba05cc202a5a9c97dee9"}, + {file = "Pillow-8.2.0-cp36-cp36m-win32.whl", hash = "sha256:12e5e7471f9b637762453da74e390e56cc43e486a88289995c1f4c1dc0bfe727"}, + {file = "Pillow-8.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5afe6b237a0b81bd54b53f835a153770802f164c5570bab5e005aad693dab87f"}, + {file = "Pillow-8.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:cb7a09e173903541fa888ba010c345893cd9fc1b5891aaf060f6ca77b6a3722d"}, + {file = "Pillow-8.2.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0d19d70ee7c2ba97631bae1e7d4725cdb2ecf238178096e8c82ee481e189168a"}, + {file = "Pillow-8.2.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:083781abd261bdabf090ad07bb69f8f5599943ddb539d64497ed021b2a67e5a9"}, + {file = "Pillow-8.2.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:c6b39294464b03457f9064e98c124e09008b35a62e3189d3513e5148611c9388"}, + {file = "Pillow-8.2.0-cp37-cp37m-win32.whl", hash = "sha256:01425106e4e8cee195a411f729cff2a7d61813b0b11737c12bd5991f5f14bcd5"}, + {file = "Pillow-8.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3b570f84a6161cf8865c4e08adf629441f56e32f180f7aa4ccbd2e0a5a02cba2"}, + {file = "Pillow-8.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:031a6c88c77d08aab84fecc05c3cde8414cd6f8406f4d2b16fed1e97634cc8a4"}, + {file = "Pillow-8.2.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:66cc56579fd91f517290ab02c51e3a80f581aba45fd924fcdee01fa06e635812"}, + {file = "Pillow-8.2.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c32cc3145928c4305d142ebec682419a6c0a8ce9e33db900027ddca1ec39178"}, + {file = "Pillow-8.2.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:624b977355cde8b065f6d51b98497d6cd5fbdd4f36405f7a8790e3376125e2bb"}, + {file = "Pillow-8.2.0-cp38-cp38-win32.whl", hash = "sha256:5cbf3e3b1014dddc45496e8cf38b9f099c95a326275885199f427825c6522232"}, + {file = "Pillow-8.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:463822e2f0d81459e113372a168f2ff59723e78528f91f0bd25680ac185cf797"}, + {file = "Pillow-8.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:95d5ef984eff897850f3a83883363da64aae1000e79cb3c321915468e8c6add5"}, + {file = "Pillow-8.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b91c36492a4bbb1ee855b7d16fe51379e5f96b85692dc8210831fbb24c43e484"}, + {file = "Pillow-8.2.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d68cb92c408261f806b15923834203f024110a2e2872ecb0bd2a110f89d3c602"}, + {file = "Pillow-8.2.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f217c3954ce5fd88303fc0c317af55d5e0204106d86dea17eb8205700d47dec2"}, + {file = "Pillow-8.2.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5b70110acb39f3aff6b74cf09bb4169b167e2660dabc304c1e25b6555fa781ef"}, + {file = "Pillow-8.2.0-cp39-cp39-win32.whl", hash = "sha256:a7d5e9fad90eff8f6f6106d3b98b553a88b6f976e51fce287192a5d2d5363713"}, + {file = 
"Pillow-8.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:238c197fc275b475e87c1453b05b467d2d02c2915fdfdd4af126145ff2e4610c"}, + {file = "Pillow-8.2.0-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:0e04d61f0064b545b989126197930807c86bcbd4534d39168f4aa5fda39bb8f9"}, + {file = "Pillow-8.2.0-pp36-pypy36_pp73-manylinux2010_i686.whl", hash = "sha256:63728564c1410d99e6d1ae8e3b810fe012bc440952168af0a2877e8ff5ab96b9"}, + {file = "Pillow-8.2.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:c03c07ed32c5324939b19e36ae5f75c660c81461e312a41aea30acdd46f93a7c"}, + {file = "Pillow-8.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:4d98abdd6b1e3bf1a1cbb14c3895226816e666749ac040c4e2554231068c639b"}, + {file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_i686.whl", hash = "sha256:aac00e4bc94d1b7813fe882c28990c1bc2f9d0e1aa765a5f2b516e8a6a16a9e4"}, + {file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:22fd0f42ad15dfdde6c581347eaa4adb9a6fc4b865f90b23378aa7914895e120"}, + {file = "Pillow-8.2.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:e98eca29a05913e82177b3ba3d198b1728e164869c613d76d0de4bde6768a50e"}, + {file = "Pillow-8.2.0.tar.gz", hash = "sha256:a787ab10d7bb5494e5f76536ac460741788f1fbce851068d73a87ca7c35fc3e1"}, ] pluggy = [ {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, ] +prefixed = [ + {file = "prefixed-0.3.2-py2.py3-none-any.whl", hash = "sha256:5e107306462d63f2f03c529dbf11b0026fdfec621a9a008ca639d71de22995c3"}, + {file = "prefixed-0.3.2.tar.gz", hash = "sha256:ca48277ba5fa8346dd4b760847da930c7b84416387c39e93affef086add2c029"}, +] protobuf = [ - {file = "protobuf-3.15.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1771ef20e88759c4d81db213e89b7a1fc53937968e12af6603c658ee4bcbfa38"}, - {file = "protobuf-3.15.6-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1a66261a402d05c8ad8c1fde8631837307bf8d7e7740a4f3941fc3277c2e1528"}, - {file = "protobuf-3.15.6-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:eac23a3e56175b710f3da9a9e8e2aa571891fbec60e0c5a06db1c7b1613b5cfd"}, - {file = "protobuf-3.15.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ec220d90eda8bb7a7a1434a8aed4fe26d7e648c1a051c2885f3f5725b6aa71a"}, - {file = "protobuf-3.15.6-cp35-cp35m-win32.whl", hash = "sha256:88d8f21d1ac205eedb6dea943f8204ed08201b081dba2a966ab5612788b9bb1e"}, - {file = "protobuf-3.15.6-cp35-cp35m-win_amd64.whl", hash = "sha256:eaada29bbf087dea7d8bce4d1d604fc768749e8809e9c295922accd7c8fce4d5"}, - {file = "protobuf-3.15.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:256c0b2e338c1f3228d3280707606fe5531fde85ab9d704cde6fdeb55112531f"}, - {file = "protobuf-3.15.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:b9069e45b6e78412fba4a314ea38b4a478686060acf470d2b131b3a2c50484ec"}, - {file = "protobuf-3.15.6-cp36-cp36m-win32.whl", hash = "sha256:24f4697f57b8520c897a401b7f9a5ae45c369e22c572e305dfaf8053ecb49687"}, - {file = "protobuf-3.15.6-cp36-cp36m-win_amd64.whl", hash = "sha256:d9ed0955b794f1e5f367e27f8a8ff25501eabe34573f003f06639c366ca75f73"}, - {file = "protobuf-3.15.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:822ac7f87fc2fb9b24edd2db390538b60ef50256e421ca30d65250fad5a3d477"}, - {file = "protobuf-3.15.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:74ac159989e2b02d761188a2b6f4601ff5e494d9b9d863f5ad6e98e5e0c54328"}, - {file = 
"protobuf-3.15.6-cp37-cp37m-win32.whl", hash = "sha256:30fe4249a364576f9594180589c3f9c4771952014b5f77f0372923fc7bafbbe2"}, - {file = "protobuf-3.15.6-cp37-cp37m-win_amd64.whl", hash = "sha256:45a91fc6f9aa86d3effdeda6751882b02de628519ba06d7160daffde0c889ff8"}, - {file = "protobuf-3.15.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:83c7c7534f050cb25383bb817159416601d1cc46c40bc5e851ec8bbddfc34a2f"}, - {file = "protobuf-3.15.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:9ec20a6ded7d0888e767ad029dbb126e604e18db744ac0a428cf746e040ccecd"}, - {file = "protobuf-3.15.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0f2da2fcc4102b6c3b57f03c9d8d5e37c63f8bc74deaa6cb54e0cc4524a77247"}, - {file = "protobuf-3.15.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:70054ae1ce5dea7dec7357db931fcf487f40ea45b02cb719ee6af07eb1e906fb"}, - {file = "protobuf-3.15.6-py2.py3-none-any.whl", hash = "sha256:1655fc0ba7402560d749de13edbfca1ac45d1753d8f4e5292989f18f5a00c215"}, - {file = "protobuf-3.15.6.tar.gz", hash = "sha256:2b974519a2ae83aa1e31cff9018c70bbe0e303a46a598f982943c49ae1d4fcd3"}, + {file = "protobuf-3.15.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fad4f971ec38d8df7f4b632c819bf9bbf4f57cfd7312cf526c69ce17ef32436a"}, + {file = "protobuf-3.15.8-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:f17b352d7ce33c81773cf81d536ca70849de6f73c96413f17309f4b43ae7040b"}, + {file = "protobuf-3.15.8-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:4a054b0b5900b7ea7014099e783fb8c4618e4209fffcd6050857517b3f156e18"}, + {file = "protobuf-3.15.8-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:efa4c4d4fc9ba734e5e85eaced70e1b63fb3c8d08482d839eb838566346f1737"}, + {file = "protobuf-3.15.8-cp35-cp35m-win32.whl", hash = "sha256:07eec4e2ccbc74e95bb9b3afe7da67957947ee95bdac2b2e91b038b832dd71f0"}, + {file = "protobuf-3.15.8-cp35-cp35m-win_amd64.whl", hash = "sha256:f9cadaaa4065d5dd4d15245c3b68b967b3652a3108e77f292b58b8c35114b56c"}, + {file = "protobuf-3.15.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2dc0e8a9e4962207bdc46a365b63a3f1aca6f9681a5082a326c5837ef8f4b745"}, + {file = "protobuf-3.15.8-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:f80afc0a0ba13339bbab25ca0409e9e2836b12bb012364c06e97c2df250c3343"}, + {file = "protobuf-3.15.8-cp36-cp36m-win32.whl", hash = "sha256:c5566f956a26cda3abdfacc0ca2e21db6c9f3d18f47d8d4751f2209d6c1a5297"}, + {file = "protobuf-3.15.8-cp36-cp36m-win_amd64.whl", hash = "sha256:dab75b56a12b1ceb3e40808b5bd9dfdaef3a1330251956e6744e5b6ed8f8830b"}, + {file = "protobuf-3.15.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3053f13207e7f13dc7be5e9071b59b02020172f09f648e85dc77e3fcb50d1044"}, + {file = "protobuf-3.15.8-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1f0b5d156c3df08cc54bc2c8b8b875648ea4cd7ebb2a9a130669f7547ec3488c"}, + {file = "protobuf-3.15.8-cp37-cp37m-win32.whl", hash = "sha256:90270fe5732c1f1ff664a3bd7123a16456d69b4e66a09a139a00443a32f210b8"}, + {file = "protobuf-3.15.8-cp37-cp37m-win_amd64.whl", hash = "sha256:f42c2f5fb67da5905bfc03733a311f72fa309252bcd77c32d1462a1ad519521e"}, + {file = "protobuf-3.15.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6077db37bfa16494dca58a4a02bfdacd87662247ad6bc1f7f8d13ff3f0013e1"}, + {file = "protobuf-3.15.8-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:510e66491f1a5ac5953c908aa8300ec47f793130097e4557482803b187a8ee05"}, + {file = "protobuf-3.15.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5ff9fa0e67fcab442af9bc8d4ec3f82cb2ff3be0af62dba047ed4187f0088b7d"}, + {file = 
"protobuf-3.15.8-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1c0e9e56202b9dccbc094353285a252e2b7940b74fdf75f1b4e1b137833fabd7"}, + {file = "protobuf-3.15.8-py2.py3-none-any.whl", hash = "sha256:a0a08c6b2e6d6c74a6eb5bf6184968eefb1569279e78714e239d33126e753403"}, + {file = "protobuf-3.15.8.tar.gz", hash = "sha256:0277f62b1e42210cafe79a71628c1d553348da81cbd553402a7f7549c50b11d0"}, ] py = [ {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, @@ -1960,8 +2011,8 @@ pyblish-base = [ {file = "pyblish_base-1.8.8-py2.py3-none-any.whl", hash = "sha256:67ea253a05d007ab4a175e44e778928ea7bdb0e9707573e1100417bbf0451a53"}, ] pycodestyle = [ - {file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"}, - {file = "pycodestyle-2.6.0.tar.gz", hash = "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"}, + {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, + {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, ] pycparser = [ {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, @@ -1973,16 +2024,16 @@ pydocstyle = [ {file = "pydocstyle-3.0.0.tar.gz", hash = "sha256:5741c85e408f9e0ddf873611085e819b809fca90b619f5fd7f34bd4959da3dd4"}, ] pyflakes = [ - {file = "pyflakes-2.2.0-py2.py3-none-any.whl", hash = "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92"}, - {file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"}, + {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, + {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, ] pygments = [ {file = "Pygments-2.8.1-py3-none-any.whl", hash = "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8"}, {file = "Pygments-2.8.1.tar.gz", hash = "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94"}, ] pylint = [ - {file = "pylint-2.7.2-py3-none-any.whl", hash = "sha256:d09b0b07ba06bcdff463958f53f23df25e740ecd81895f7d2699ec04bbd8dc3b"}, - {file = "pylint-2.7.2.tar.gz", hash = "sha256:0e21d3b80b96740909d77206d741aa3ce0b06b41be375d92e1f3244a274c1f8a"}, + {file = "pylint-2.7.4-py3-none-any.whl", hash = "sha256:209d712ec870a0182df034ae19f347e725c1e615b2269519ab58a35b3fcbbe7a"}, + {file = "pylint-2.7.4.tar.gz", hash = "sha256:bd38914c7731cdc518634a8d3c5585951302b6e2b6de60fbb3f7a0220e21eeee"}, ] pymongo = [ {file = "pymongo-3.11.3-cp27-cp27m-macosx_10_14_intel.whl", hash = "sha256:4d959e929cec805c2bf391418b1121590b4e7d5cb00af7b1ba521443d45a0918"}, @@ -2123,8 +2174,8 @@ pyrsistent = [ {file = "pyrsistent-0.17.3.tar.gz", hash = "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"}, ] pytest = [ - {file = "pytest-6.2.2-py3-none-any.whl", hash = "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839"}, - {file = "pytest-6.2.2.tar.gz", hash = "sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9"}, + {file = "pytest-6.2.3-py3-none-any.whl", hash = "sha256:6ad9c7bdf517a808242b998ac20063c41532a570d088d77eec1ee12b0b5574bc"}, + {file = "pytest-6.2.3.tar.gz", hash = 
"sha256:671238a46e4df0f3498d1c3270e5deb9b32d25134c99b7d75370a68cfbe9b634"}, ] pytest-cov = [ {file = "pytest-cov-2.11.1.tar.gz", hash = "sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7"}, @@ -2198,16 +2249,16 @@ speedcopy = [ {file = "speedcopy-2.1.0.tar.gz", hash = "sha256:8bb1a6c735900b83901a7be84ba2175ed3887c13c6786f97dea48f2ea7d504c2"}, ] sphinx = [ - {file = "Sphinx-3.5.2-py3-none-any.whl", hash = "sha256:ef64a814576f46ec7de06adf11b433a0d6049be007fefe7fd0d183d28b581fac"}, - {file = "Sphinx-3.5.2.tar.gz", hash = "sha256:672cfcc24b6b69235c97c750cb190a44ecd72696b4452acaf75c2d9cc78ca5ff"}, + {file = "Sphinx-3.5.4-py3-none-any.whl", hash = "sha256:2320d4e994a191f4b4be27da514e46b3d6b420f2ff895d064f52415d342461e8"}, + {file = "Sphinx-3.5.4.tar.gz", hash = "sha256:19010b7b9fa0dc7756a6e105b2aacd3a80f798af3c25c273be64d7beeb482cb1"}, ] sphinx-qt-documentation = [ {file = "sphinx_qt_documentation-0.3-py3-none-any.whl", hash = "sha256:bee247cb9e4fc03fc496d07adfdb943100e1103320c3e5e820e0cfa7c790d9b6"}, {file = "sphinx_qt_documentation-0.3.tar.gz", hash = "sha256:f09a0c9d9e989172ba3e282b92bf55613bb23ad47315ec5b0d38536b343ac6c8"}, ] sphinx-rtd-theme = [ - {file = "sphinx_rtd_theme-0.5.1-py2.py3-none-any.whl", hash = "sha256:fa6bebd5ab9a73da8e102509a86f3fcc36dec04a0b52ea80e5a033b2aba00113"}, - {file = "sphinx_rtd_theme-0.5.1.tar.gz", hash = "sha256:eda689eda0c7301a80cf122dad28b1861e5605cbf455558f3775e1e8200e83a5"}, + {file = "sphinx_rtd_theme-0.5.2-py2.py3-none-any.whl", hash = "sha256:4a05bdbe8b1446d77a01e20a23ebc6777c74f43237035e76be89699308987d6f"}, + {file = "sphinx_rtd_theme-0.5.2.tar.gz", hash = "sha256:32bd3b5d13dc8186d7a42fc816a23d32e83a4827d7d9882948e7b837c232da5a"}, ] sphinxcontrib-applehelp = [ {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, @@ -2244,41 +2295,37 @@ toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -tqdm = [ - {file = "tqdm-4.59.0-py2.py3-none-any.whl", hash = "sha256:9fdf349068d047d4cfbe24862c425883af1db29bcddf4b0eeb2524f6fbdb23c7"}, - {file = "tqdm-4.59.0.tar.gz", hash = "sha256:d666ae29164da3e517fcf125e41d4fe96e5bb375cd87ff9763f6b38b5592fe33"}, -] typed-ast = [ - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487"}, - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412"}, - {file = "typed_ast-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400"}, - {file = "typed_ast-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606"}, - {file = "typed_ast-1.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a"}, - {file = "typed_ast-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151"}, - {file = "typed_ast-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3"}, - {file = "typed_ast-1.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41"}, - {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f"}, - {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581"}, - {file = "typed_ast-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37"}, - {file = "typed_ast-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd"}, - {file = "typed_ast-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496"}, - {file = "typed_ast-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787"}, - {file = "typed_ast-1.4.2-cp38-cp38-win32.whl", hash = "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2"}, - {file = "typed_ast-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937"}, - {file = "typed_ast-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1"}, - {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6"}, - {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166"}, - {file = "typed_ast-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d"}, - {file = "typed_ast-1.4.2-cp39-cp39-win32.whl", hash = "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b"}, - {file = "typed_ast-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440"}, - {file = "typed_ast-1.4.2.tar.gz", hash = "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = 
"sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, + {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, + {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, + {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, + {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, + {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, + {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, + {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = 
"sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, + {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, + {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, ] typing-extensions = [ {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, @@ -2290,8 +2337,8 @@ uritemplate = [ {file = "uritemplate-3.0.1.tar.gz", hash = "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae"}, ] urllib3 = [ - {file = "urllib3-1.26.3-py2.py3-none-any.whl", hash = "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80"}, - {file = "urllib3-1.26.3.tar.gz", hash = "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73"}, + {file = "urllib3-1.26.4-py2.py3-none-any.whl", hash = "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df"}, + {file = "urllib3-1.26.4.tar.gz", hash = "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, @@ -2305,8 +2352,8 @@ wrapt = [ {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, ] wsrpc-aiohttp = [ - {file = "wsrpc-aiohttp-3.1.1.tar.gz", hash = "sha256:a17e1d91624a437e759d4f276b73de1db2071b1681e992cade025e91d31b2a9f"}, - {file = "wsrpc_aiohttp-3.1.1-py3-none-any.whl", hash = "sha256:f3f1ee31aed5145a7fafe8d6c778b914b7e6ec131500395c9c85b0d8676f7302"}, + {file = "wsrpc-aiohttp-3.1.2.tar.gz", hash = "sha256:891164dfe06a8d8d846b485d04b1e56b2c397ff1b46ef0348e6f62bd8efb1693"}, + {file = "wsrpc_aiohttp-3.1.2-py3-none-any.whl", hash = "sha256:4ba64e02b12dcbc09d02544f35bceba49bd04cbc496db47aa8559ae4609ada8e"}, ] yarl = [ {file = "yarl-1.6.3-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434"}, diff --git a/pyproject.toml b/pyproject.toml index ec2d9c7e3b..88c977cd99 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [tool.poetry] name = "OpenPype" -version = "3.0.0-alpha1" -description = "Multi-platform open-source pipeline built around the Avalon platform, expanding it with extra features and integrations." +version = "3.0.0-beta2" +description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" homepage = "https://openpype.io" @@ -18,6 +18,7 @@ acre = { git = "https://github.com/pypeclub/acre.git" } opentimelineio = { version = "0.14.0.dev1", source = "openpype" } appdirs = "^1.4.3" blessed = "^1.17" # openpype terminal formatting +coolname = "*" clique = "1.5.*" Click = "^7" dnspython = "^2.1.0" @@ -42,12 +43,13 @@ jinxed = [ { version = "^1.0.1", markers = "sys_platform == 'linux'" } ] python3-xlib = { version="*", markers = "sys_platform == 'linux'"} +enlighten = "^1.9.0" [tool.poetry.dev-dependencies] flake8 = "^3.7" autopep8 = "^1.4" coverage = "*" -cx_freeze = "^6.5" +cx_freeze = "^6.6" jedi = "^0.13" Jinja2 = "^2.11" pycodestyle = "^2.5.0" @@ -61,8 +63,8 @@ sphinx-rtd-theme = "*" sphinxcontrib-websupport = "*" sphinx-qt-documentation = "*" recommonmark = "*" -tqdm = "*" wheel = "*" +enlighten = "*" # cool terminal progress bars [tool.poetry.urls] "Bug Tracker" = "https://github.com/pypeclub/openpype/issues" @@ -75,3 +77,29 @@ url = "https://distribute.openpype.io/wheels/" [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" + +[openpype] + +[openpype.thirdparty.ffmpeg.windows] +url = "https://distribute.openpype.io/thirdparty/ffmpeg-4.4-windows.zip" +hash = "dd51ba29d64ee238e7c4c3c7301b19754c3f0ee2e2a729c20a0e2789e72db925" + +[openpype.thirdparty.ffmpeg.linux] +url = "https://distribute.openpype.io/thirdparty/ffmpeg-4.4-linux.tgz" +hash = "10b9beda57cfbb69b9ed0ce896c0c8d99227b26ca8b9f611040c4752e365cbe9" + +[openpype.thirdparty.ffmpeg.darwin] +url = "https://distribute.openpype.io/thirdparty/ffmpeg-4.4-macos.tgz" +hash = "95f43568338c275f80dc0cab1e1836a2e2270f856f0e7b204440d881dd74fbdb" + +[openpype.thirdparty.oiio.windows] +url = "https://distribute.openpype.io/thirdparty/oiio_tools-2.2.0-windows.zip" +hash = "fd2e00278e01e85dcee7b4a6969d1a16f13016ec16700fb0366dbb1b1f3c37ad" + +[openpype.thirdparty.oiio.linux] +url = "https://distribute.openpype.io/thirdparty/oiio-2.2.0-linux.tgz" +hash = "sha256:..." + +[openpype.thirdparty.oiio.darwin] +url = "https://distribute.openpype.io/thirdparty/oiio-2.2.0-darwin.tgz" +hash = "sha256:..." 
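The [openpype.thirdparty] tables above give each bundled tool a per-platform download url and a sha256 hash. As a rough sketch of how such a table is meant to be consumed (this example is illustrative, not part of the patch; the actual consumer is the new tools/fetch_thirdparty_libs.py added further down in this diff):

    import platform
    import toml

    pyproject = toml.load("pyproject.toml")
    platform_name = platform.system().lower()  # "windows", "linux" or "darwin"

    for tool, platforms in pyproject["openpype"]["thirdparty"].items():
        entry = platforms.get(platform_name)
        if not entry:
            continue
        print(f"{tool}: {entry['url']} (sha256: {entry['hash']})")

Keeping the urls and checksums in pyproject.toml gives the build tooling a single, versioned source of truth for binary dependencies such as ffmpeg and oiio.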
\ No newline at end of file
diff --git a/setup.cfg b/setup.cfg
index 03c999e05f..51a3c1a9ca 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -6,7 +6,8 @@ exclude =
     .git,
     __pycache__,
     docs,
-    */vendor
+    */vendor,
+    website
 max-complexity = 30
diff --git a/setup.py b/setup.py
index fd589e5251..c096befa34 100644
--- a/setup.py
+++ b/setup.py
@@ -45,7 +45,8 @@ install_requires = [
    "googleapiclient",
    "httplib2",
    # Harmony implementation
-    "filecmp"
+    "filecmp",
+    "dns"
 ]

 includes = []
@@ -69,7 +70,11 @@ if sys.platform == "win32":
        "pythoncom"
    ])

-build_options = dict(
+
+icon_path = openpype_root / "igniter" / "openpype.ico"
+mac_icon_path = openpype_root / "igniter" / "openpype.icns"
+
+build_exe_options = dict(
    packages=install_requires,
    includes=includes,
    excludes=excludes,
@@ -78,13 +83,16 @@ build_options = dict(
    optimize=0
 )

-icon_path = openpype_root / "igniter" / "openpype.ico"
+bdist_mac_options = dict(
+    bundle_name="OpenPype",
+    iconfile=mac_icon_path
+)

 executables = [
-    Executable("start.py", base=None,
-               target_name="openpype_console", icon=icon_path.as_posix()),
     Executable("start.py", base=base,
-               target_name="openpype_gui", icon=icon_path.as_posix())
+               target_name="openpype_gui", icon=icon_path.as_posix()),
+    Executable("start.py", base=None,
+               target_name="openpype_console", icon=icon_path.as_posix())
 ]

 setup(
@@ -93,7 +101,8 @@ setup(
    description="Ultimate pipeline",
    cmdclass={"build_sphinx": BuildDoc},
    options={
-        "build_exe": build_options,
+        "build_exe": build_exe_options,
+        "bdist_mac": bdist_mac_options,
        "build_sphinx": {
            "project": "OpenPype",
            "version": __version__,
diff --git a/start.py b/start.py
index 1f946a705c..0295d0ca62 100644
--- a/start.py
+++ b/start.py
@@ -100,11 +100,22 @@ import subprocess
 import site
 from pathlib import Path

-# add dependencies folder to sys.pat for frozen code
-if getattr(sys, 'frozen', False):
+# OPENPYPE_ROOT is a variable pointing to the build (or code) directory
+# WARNING: `OPENPYPE_ROOT` must be defined before the igniter import
+# - igniter changes the cwd, so the file path of this script would no longer
+#   lead to the right directory
+if not getattr(sys, 'frozen', False):
+    # Code root defined by `start.py` directory
+    OPENPYPE_ROOT = os.path.dirname(os.path.abspath(__file__))
+else:
+    OPENPYPE_ROOT = os.path.dirname(sys.executable)
+
+    # add dependencies folder to sys.path for frozen code
     frozen_libs = os.path.normpath(
-        os.path.join(os.path.dirname(sys.executable), "dependencies"))
+        os.path.join(OPENPYPE_ROOT, "dependencies")
+    )
     sys.path.append(frozen_libs)
+    sys.path.insert(0, OPENPYPE_ROOT)

 # add stuff from `/dependencies` to PYTHONPATH.
pythonpath = os.getenv("PYTHONPATH", "") paths = pythonpath.split(os.pathsep) @@ -113,7 +124,10 @@ if getattr(sys, 'frozen', False): import igniter # noqa: E402 from igniter import BootstrapRepos # noqa: E402 -from igniter.tools import get_openpype_path_from_db # noqa +from igniter.tools import ( + get_openpype_path_from_db, + validate_mongo_connection +) # noqa from igniter.bootstrap_repos import OpenPypeVersion # noqa: E402 bootstrap = BootstrapRepos() @@ -275,6 +289,10 @@ def _process_arguments() -> tuple: if return_code not in [2, 3]: sys.exit(return_code) + idx = sys.argv.index("igniter") + sys.argv.pop(idx) + sys.argv.insert(idx, "tray") + return use_version, use_staging @@ -296,19 +314,31 @@ def _determine_mongodb() -> str: if not openpype_mongo: # try system keyring try: - openpype_mongo = bootstrap.registry.get_secure_item( + openpype_mongo = bootstrap.secure_registry.get_item( + "openPypeMongo" + ) + except ValueError: + pass + + if openpype_mongo: + result, msg = validate_mongo_connection(openpype_mongo) + if not result: + print(msg) + openpype_mongo = None + + if not openpype_mongo: + print("*** No DB connection string specified.") + print("--- launching setup UI ...") + + result = igniter.open_dialog() + if result == 0: + raise RuntimeError("MongoDB URL was not defined") + + try: + openpype_mongo = bootstrap.secure_registry.get_item( "openPypeMongo") except ValueError: - print("*** No DB connection string specified.") - print("--- launching setup UI ...") - import igniter - igniter.open_dialog() - - try: - openpype_mongo = bootstrap.registry.get_secure_item( - "openPypeMongo") - except ValueError: - raise RuntimeError("missing mongodb url") + raise RuntimeError("Missing MongoDB url") return openpype_mongo @@ -316,22 +346,25 @@ def _determine_mongodb() -> str: def _initialize_environment(openpype_version: OpenPypeVersion) -> None: version_path = openpype_version.path os.environ["OPENPYPE_VERSION"] = openpype_version.version - # set OPENPYPE_ROOT to point to currently used OpenPype version. - os.environ["OPENPYPE_ROOT"] = os.path.normpath(version_path.as_posix()) + # set OPENPYPE_REPOS_ROOT to point to currently used OpenPype version. + os.environ["OPENPYPE_REPOS_ROOT"] = os.path.normpath( + version_path.as_posix() + ) # inject version to Python environment (sys.path, ...) print(">>> Injecting OpenPype version to running environment ...") bootstrap.add_paths_from_directory(version_path) - # Additional sys paths related to OPENPYPE_ROOT directory - # TODO move additional paths to `boot` part when OPENPYPE_ROOT will point - # to same hierarchy from code and from frozen OpenPype + # Additional sys paths related to OPENPYPE_REPOS_ROOT directory + # TODO move additional paths to `boot` part when OPENPYPE_REPOS_ROOT will + # point to same hierarchy from code and from frozen OpenPype additional_paths = [ + os.environ["OPENPYPE_REPOS_ROOT"], # add OpenPype tools - os.path.join(os.environ["OPENPYPE_ROOT"], "openpype", "tools"), + os.path.join(os.environ["OPENPYPE_REPOS_ROOT"], "openpype", "tools"), # add common OpenPype vendor # (common for multiple Python interpreter versions) os.path.join( - os.environ["OPENPYPE_ROOT"], + os.environ["OPENPYPE_REPOS_ROOT"], "openpype", "vendor", "python", @@ -352,7 +385,7 @@ def _find_frozen_openpype(use_version: str = None, """Find OpenPype to run from frozen code. 
This will process and modify environment variables: - ``PYTHONPATH``, ``OPENPYPE_VERSION``, ``OPENPYPE_ROOT`` + ``PYTHONPATH``, ``OPENPYPE_VERSION``, ``OPENPYPE_REPOS_ROOT`` Args: use_version (str, optional): Try to use specified version. @@ -464,16 +497,10 @@ def _bootstrap_from_code(use_version): # run through repos and add them to `sys.path` and `PYTHONPATH` # set root if getattr(sys, 'frozen', False): - openpype_root = os.path.normpath( - os.path.dirname(sys.executable)) - local_version = bootstrap.get_version(Path(openpype_root)) + local_version = bootstrap.get_version(Path(OPENPYPE_ROOT)) print(f" - running version: {local_version}") assert local_version else: - openpype_root = os.path.normpath( - os.path.dirname( - os.path.dirname( - os.path.realpath(igniter.__file__)))) # get current version of OpenPype local_version = bootstrap.get_local_live_version() @@ -487,15 +514,18 @@ def _bootstrap_from_code(use_version): bootstrap.add_paths_from_directory(version_path) os.environ["OPENPYPE_VERSION"] = use_version else: - version_path = openpype_root - os.environ["OPENPYPE_ROOT"] = openpype_root - repos = os.listdir(os.path.join(openpype_root, "repos")) - repos = [os.path.join(openpype_root, "repos", repo) for repo in repos] + version_path = OPENPYPE_ROOT + + repos = os.listdir(os.path.join(OPENPYPE_ROOT, "repos")) + repos = [os.path.join(OPENPYPE_ROOT, "repos", repo) for repo in repos] # add self to python paths - repos.insert(0, openpype_root) + repos.insert(0, OPENPYPE_ROOT) for repo in repos: sys.path.insert(0, repo) + # Set OPENPYPE_REPOS_ROOT to code root + os.environ["OPENPYPE_REPOS_ROOT"] = OPENPYPE_ROOT + # add venv 'site-packages' to PYTHONPATH python_path = os.getenv("PYTHONPATH", "") split_paths = python_path.split(os.pathsep) @@ -506,15 +536,15 @@ def _bootstrap_from_code(use_version): # in case when we are running without any version installed. if not getattr(sys, 'frozen', False): split_paths.append(site.getsitepackages()[-1]) - # TODO move additional paths to `boot` part when OPENPYPE_ROOT will point - # to same hierarchy from code and from frozen OpenPype + # TODO move additional paths to `boot` part when OPENPYPE_ROOT will + # point to same hierarchy from code and from frozen OpenPype additional_paths = [ # add OpenPype tools - os.path.join(os.environ["OPENPYPE_ROOT"], "openpype", "tools"), + os.path.join(OPENPYPE_ROOT, "openpype", "tools"), # add common OpenPype vendor # (common for multiple Python interpreter versions) os.path.join( - os.environ["OPENPYPE_ROOT"], + OPENPYPE_ROOT, "openpype", "vendor", "python", @@ -533,6 +563,11 @@ def _bootstrap_from_code(use_version): def boot(): """Bootstrap OpenPype.""" + # ------------------------------------------------------------------------ + # Set environment to OpenPype root path + # ------------------------------------------------------------------------ + os.environ["OPENPYPE_ROOT"] = OPENPYPE_ROOT + # ------------------------------------------------------------------------ # Play animation # ------------------------------------------------------------------------ @@ -563,16 +598,6 @@ def boot(): os.environ["OPENPYPE_MONGO"] = openpype_mongo os.environ["OPENPYPE_DATABASE_NAME"] = "openpype" # name of Pype database - # ------------------------------------------------------------------------ - # Set environments - load OpenPype path from database (if set) - # ------------------------------------------------------------------------ - # set OPENPYPE_ROOT to running location until proper version can be - # determined. 
- if getattr(sys, 'frozen', False): - os.environ["OPENPYPE_ROOT"] = os.path.dirname(sys.executable) - else: - os.environ["OPENPYPE_ROOT"] = os.path.dirname(__file__) - # Get openpype path from database and set it to environment so openpype can # find its versions there and bootstrap them. openpype_path = get_openpype_path_from_db(openpype_mongo) @@ -585,7 +610,7 @@ def boot(): # ------------------------------------------------------------------------ # Find OpenPype versions # ------------------------------------------------------------------------ - # WARNING: Environment OPENPYPE_ROOT may change if frozen OpenPype + # WARNING: Environment OPENPYPE_REPOS_ROOT may change if frozen OpenPype # is executed if getattr(sys, 'frozen', False): # find versions of OpenPype to be used with frozen code @@ -602,12 +627,6 @@ def boot(): # or to `openpype` or `openpype_console` in case of frozen code os.environ["OPENPYPE_EXECUTABLE"] = sys.executable - if getattr(sys, 'frozen', False): - os.environ["OPENPYPE_REPOS_ROOT"] = os.environ["OPENPYPE_ROOT"] - else: - os.environ["OPENPYPE_REPOS_ROOT"] = os.path.join( - os.environ["OPENPYPE_ROOT"], "repos") - # delete OpenPype module and it's submodules from cache so it is used from # specific version modules_to_del = [ @@ -677,7 +696,9 @@ def get_info() -> list: inf.append(("OpenPype variant", "staging")) else: inf.append(("OpenPype variant", "production")) - inf.append(("Running OpenPype from", os.environ.get('OPENPYPE_ROOT'))) + inf.append( + ("Running OpenPype from", os.environ.get('OPENPYPE_REPOS_ROOT')) + ) inf.append(("Using mongodb", components["host"])) if os.environ.get("FTRACK_SERVER"): diff --git a/test_localsystem.txt b/test_localsystem.txt new file mode 100644 index 0000000000..dde7986af8 --- /dev/null +++ b/test_localsystem.txt @@ -0,0 +1 @@ +I have run diff --git a/tests/igniter/test_bootstrap_repos.py b/tests/igniter/test_bootstrap_repos.py index 75996b4026..6c70380ab6 100644 --- a/tests/igniter/test_bootstrap_repos.py +++ b/tests/igniter/test_bootstrap_repos.py @@ -11,7 +11,7 @@ import pytest from igniter.bootstrap_repos import BootstrapRepos from igniter.bootstrap_repos import PypeVersion -from pype.lib import PypeSettingsRegistry +from pype.lib import OpenPypeSettingsRegistry @pytest.fixture @@ -348,7 +348,7 @@ def test_find_pype(fix_bootstrap, tmp_path_factory, monkeypatch, printer): return d_path.as_posix() monkeypatch.setattr(appdirs, "user_data_dir", mock_user_data_dir) - fix_bootstrap.registry = PypeSettingsRegistry() + fix_bootstrap.registry = OpenPypeSettingsRegistry() fix_bootstrap.registry.set_item("pypePath", d_path.as_posix()) result = fix_bootstrap.find_pype(include_zips=True) diff --git a/tests/pype/lib/test_user_settings.py b/tests/pype/lib/test_user_settings.py index 7f0f400f59..02342abbc9 100644 --- a/tests/pype/lib/test_user_settings.py +++ b/tests/pype/lib/test_user_settings.py @@ -1,10 +1,20 @@ import pytest -from pype.lib import IniSettingRegistry -from pype.lib import JSONSettingRegistry +from pype.lib import ( + IniSettingRegistry, + JSONSettingRegistry, + OpenPypeSecureRegistry +) from uuid import uuid4 import configparser +@pytest.fixture +def secure_registry(tmpdir): + name = "pypetest_{}".format(str(uuid4())) + r = OpenPypeSecureRegistry(name, tmpdir) + yield r + + @pytest.fixture def json_registry(tmpdir): name = "pypetest_{}".format(str(uuid4())) @@ -19,21 +29,21 @@ def ini_registry(tmpdir): yield r -def test_keyring(json_registry): - json_registry.set_secure_item("item1", "foo") - 
json_registry.set_secure_item("item2", "bar") - result1 = json_registry.get_secure_item("item1") - result2 = json_registry.get_secure_item("item2") +def test_keyring(secure_registry): + secure_registry.set_item("item1", "foo") + secure_registry.set_item("item2", "bar") + result1 = secure_registry.get_item("item1") + result2 = secure_registry.get_item("item2") assert result1 == "foo" assert result2 == "bar" - json_registry.delete_secure_item("item1") - json_registry.delete_secure_item("item2") + secure_registry.delete_item("item1") + secure_registry.delete_item("item2") with pytest.raises(ValueError): - json_registry.get_secure_item("item1") - json_registry.get_secure_item("item2") + secure_registry.get_item("item1") + secure_registry.get_item("item2") def test_ini_registry(ini_registry): diff --git a/tools/build.ps1 b/tools/build.ps1 index a6c76dfafb..5283ee4754 100644 --- a/tools/build.ps1 +++ b/tools/build.ps1 @@ -121,6 +121,10 @@ catch { Exit-WithCode 1 } +Write-Host ">>> " -NoNewLine -ForegroundColor green +Write-Host "Making sure submodules are up-to-date ..." +git submodule update --init --recursive + Write-Host ">>> " -NoNewline -ForegroundColor green Write-Host "Building OpenPype [ " -NoNewline -ForegroundColor white Write-host $openpype_version -NoNewline -ForegroundColor green @@ -157,11 +161,12 @@ Write-Host "Reading Poetry ... " -NoNewline if (-not (Test-Path -PathType Container -Path "$($env:USERPROFILE)\.poetry\bin")) { Write-Host "NOT FOUND" -ForegroundColor Yellow Install-Poetry + Write-Host "INSTALLED" -ForegroundColor Cyan } else { Write-Host "OK" -ForegroundColor Green } - +$env:PATH = "$($env:PATH);$($env:USERPROFILE)\.poetry\bin" Write-Host ">>> " -NoNewline -ForegroundColor green Write-Host "Cleaning cache files ... " -NoNewline @@ -172,6 +177,7 @@ Write-Host "OK" -ForegroundColor green Write-Host ">>> " -NoNewline -ForegroundColor green Write-Host "Building OpenPype ..." + $out = & poetry run python setup.py build 2>&1 if ($LASTEXITCODE -ne 0) { diff --git a/tools/build.sh b/tools/build.sh index b95e2969c4..d0593a2b2f 100755 --- a/tools/build.sh +++ b/tools/build.sh @@ -157,10 +157,33 @@ main () { install_poetry || { echo -e "${BIRed}!!!${RST} Poetry installation failed"; return; } fi + echo -e "${BIGreen}>>>${RST} Making sure submodules are up-to-date ..." + git submodule update --init --recursive + echo -e "${BIGreen}>>>${RST} Building ..." 
-    poetry run python3 "$openpype_root/setup.py" build > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; }
+    if [[ "$OSTYPE" == "linux-gnu"* ]]; then
+      poetry run python3 "$openpype_root/setup.py" build > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; }
+    elif [[ "$OSTYPE" == "darwin"* ]]; then
+      poetry run python3 "$openpype_root/setup.py" bdist_mac > "$openpype_root/build/build.log" || { echo -e "${BIRed}!!!${RST} Build failed, see the build log."; return; }
+    fi
    poetry run python3 "$openpype_root/tools/build_dependencies.py"
+
+    if [[ "$OSTYPE" == "darwin"* ]]; then
+        # fix code signing issue
+        codesign --remove-signature "$openpype_root/build/OpenPype.app/Contents/MacOS/lib/Python"
+        if command -v create-dmg > /dev/null 2>&1; then
+            create-dmg \
+                --volname "OpenPype Installer" \
+                --window-pos 200 120 \
+                --window-size 600 300 \
+                --app-drop-link 100 50 \
+                "$openpype_root/build/OpenPype-Installer.dmg" \
+                "$openpype_root/build/OpenPype.app"
+        else
+            echo -e "${BIYellow}!!!${RST} ${BIWhite}create-dmg${RST} command is not available."
+        fi
+    fi
+
    echo -e "${BICyan}>>>${RST} All done. You will find OpenPype and build log in \c"
    echo -e "${BIWhite}$openpype_root/build${RST} directory."
 }
diff --git a/tools/build_dependencies.py b/tools/build_dependencies.py
index e49e930a70..fb52e2b5fd 100644
--- a/tools/build_dependencies.py
+++ b/tools/build_dependencies.py
@@ -22,6 +22,7 @@ import os
 import sys
 import site
 from distutils.util import get_platform
+import platform
 from pathlib import Path
 import shutil
 import blessed
@@ -76,7 +77,14 @@ _print(f"Working with: {site_pkg}", 2)
 build_dir = "exe.{}-{}".format(get_platform(), sys.version[0:3])

 # create full path
-build_dir = Path(os.path.dirname(__file__)).parent / "build" / build_dir
+if platform.system().lower() == "darwin":
+    build_dir = Path(os.path.dirname(__file__)).parent.joinpath(
+        "build",
+        "OpenPype.app",
+        "Contents",
+        "MacOS")
+else:
+    build_dir = Path(os.path.dirname(__file__)).parent / "build" / build_dir

 _print(f"Using build at {build_dir}", 2)
 if not build_dir.exists():
diff --git a/tools/build_win_installer.ps1 b/tools/build_win_installer.ps1
new file mode 100644
index 0000000000..4a4d011258
--- /dev/null
+++ b/tools/build_win_installer.ps1
@@ -0,0 +1,140 @@
+<#
+.SYNOPSIS
+  Helper script to create OpenPype Windows installer.
+
+.DESCRIPTION
+  This script will take an existing OpenPype build in the `build`
+  directory (created by `tools/build.ps1`) and wrap it into a Windows
+  installer using Inno Setup.
+
+.EXAMPLE
+
+PS> .\build_win_installer.ps1
+
+#>
+
+function Start-Progress {
+    param([ScriptBlock]$code)
+    $scroll = "/-\|/-\|"
+    $idx = 0
+    $job = Invoke-Command -ComputerName $env:ComputerName -ScriptBlock { $code } -AsJob
+
+    $origpos = $host.UI.RawUI.CursorPosition
+
+    # $origpos.Y -= 1
+
+    while (($job.State -eq "Running") -and ($job.State -ne "NotStarted"))
+    {
+        $host.UI.RawUI.CursorPosition = $origpos
+        Write-Host $scroll[$idx] -NoNewline
+        $idx++
+        if ($idx -ge $scroll.Length)
+        {
+            $idx = 0
+        }
+        Start-Sleep -Milliseconds 100
+    }
+    # It's over - clear the activity indicator.
+    $host.UI.RawUI.CursorPosition = $origpos
+    Write-Host ' '
+  <#
+  .SYNOPSIS
+  Display spinner for running job
+  .PARAMETER code
+  Job to display spinner for
+  #>
+}
+
+
+function Exit-WithCode($exitcode) {
+   # Only exit this host process if it's a child of another PowerShell parent process...
+   $parentPID = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$PID" | Select-Object -Property ParentProcessId).ParentProcessId
+   $parentProcName = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$parentPID" | Select-Object -Property Name).Name
+   if ('powershell.exe' -eq $parentProcName) { $host.SetShouldExit($exitcode) }
+
+   exit $exitcode
+}
+
+function Show-PSWarning() {
+    if ($PSVersionTable.PSVersion.Major -lt 7) {
+        Write-Host "!!! " -NoNewline -ForegroundColor Red
+        Write-Host "You are using an old version of PowerShell: $($PSVersionTable.PSVersion.Major).$($PSVersionTable.PSVersion.Minor)"
+        Write-Host "Please update to at least 7.0 - " -NoNewline -ForegroundColor Gray
+        Write-Host "https://github.com/PowerShell/PowerShell/releases" -ForegroundColor White
+        Exit-WithCode 1
+    }
+}
+
+function Install-Poetry() {
+    Write-Host ">>> " -NoNewline -ForegroundColor Green
+    Write-Host "Installing Poetry ... "
+    (Invoke-WebRequest -Uri https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py -UseBasicParsing).Content | python -
+    # add it to PATH
+    $env:PATH = "$($env:PATH);$($env:USERPROFILE)\.poetry\bin"
+}
+
+$art = @"
+
+β–’β–ˆβ–€β–€β–€β–ˆ β–ˆβ–€β–€β–ˆ β–ˆβ–€β–€ β–ˆβ–€β–€β–„ β–’β–ˆβ–€β–€β–ˆ β–ˆβ–‘β–‘β–ˆ β–ˆβ–€β–€β–ˆ β–ˆβ–€β–€ β–€β–ˆβ–€ β–€β–ˆβ–€ β–€β–ˆβ–€
+β–’β–ˆβ–‘β–‘β–’β–ˆ β–ˆβ–‘β–‘β–ˆ β–ˆβ–€β–€ β–ˆβ–‘β–‘β–ˆ β–’β–ˆβ–„β–„β–ˆ β–ˆβ–„β–„β–ˆ β–ˆβ–‘β–‘β–ˆ β–ˆβ–€β–€ β–’β–ˆβ–‘ β–’β–ˆβ–‘ β–’β–ˆβ–‘
+β–’β–ˆβ–„β–„β–„β–ˆ β–ˆβ–€β–€β–€ β–€β–€β–€ β–€β–‘β–‘β–€ β–’β–ˆβ–‘β–‘β–‘ β–„β–„β–„β–ˆ β–ˆβ–€β–€β–€ β–€β–€β–€ β–„β–ˆβ–„ β–„β–ˆβ–„ β–„β–ˆβ–„
+            .---= [ by Pype Club ] =---.
+                 https://openpype.io
+
+"@
+
+Write-Host $art -ForegroundColor DarkGreen
+
+# Enable if PS 7.x is needed.
+# Show-PSWarning
+
+$current_dir = Get-Location
+$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
+$openpype_root = (Get-Item $script_dir).parent.FullName
+
+Set-Location -Path $openpype_root
+
+$version_file = Get-Content -Path "$($openpype_root)\openpype\version.py"
+$result = [regex]::Matches($version_file, '__version__ = "(?<version>\d+\.\d+.\d+.*)"')
+$openpype_version = $result[0].Groups['version'].Value
+if (-not $openpype_version) {
+    Write-Host "!!! " -ForegroundColor yellow -NoNewline
+    Write-Host "Cannot determine OpenPype version."
+    Exit-WithCode 1
+}
+$env:BUILD_VERSION = $openpype_version
+
+Write-Host ">>> " -NoNewline -ForegroundColor green
+Write-Host "Creating OpenPype installer ... " -ForegroundColor white
+
+$build_dir_command = @"
+import sys
+from distutils.util import get_platform
+print('exe.{}-{}'.format(get_platform(), sys.version[0:3]))
+"@
+
+$build_dir = & python -c $build_dir_command
+Write-Host "Build directory ... ${build_dir}" -ForegroundColor white
+$env:BUILD_DIR = $build_dir
+
+if (Get-Command iscc -errorAction SilentlyContinue -ErrorVariable ProcessError)
+{
+    iscc "$openpype_root\inno_setup.iss"
+} else {
+    Write-Host "!!! Cannot find Inno Setup command" -ForegroundColor red
+    Write-Host "!!! 
You can download it at https://jrsoftware.org/" -ForegroundColor red + Exit-WithCode 1 +} + + +Write-Host ">>> " -NoNewline -ForegroundColor green +Write-Host "restoring current directory" +Set-Location -Path $current_dir + +Write-Host "*** " -NoNewline -ForegroundColor Cyan +Write-Host "All done. You will find OpenPype installer in " -NoNewLine +Write-Host "'.\build'" -NoNewline -ForegroundColor Green +Write-Host " directory." diff --git a/tools/create_env.ps1 b/tools/create_env.ps1 index 44e1799be8..e72e98e04b 100644 --- a/tools/create_env.ps1 +++ b/tools/create_env.ps1 @@ -133,7 +133,7 @@ if (-not (Test-Path -PathType Leaf -Path "$($openpype_root)\poetry.lock")) { Write-Host ">>> " -NoNewline -ForegroundColor green Write-Host "Installing virtual environment from lock." } -& poetry install $poetry_verbosity +& poetry install --no-root $poetry_verbosity if ($LASTEXITCODE -ne 0) { Write-Host "!!! " -ForegroundColor yellow -NoNewline Write-Host "Poetry command failed." diff --git a/tools/create_env.sh b/tools/create_env.sh index 6ca0731963..04414ddea5 100755 --- a/tools/create_env.sh +++ b/tools/create_env.sh @@ -160,7 +160,7 @@ main () { echo -e "${BIGreen}>>>${RST} Installing dependencies ..." fi - poetry install $poetry_verbosity || { echo -e "${BIRed}!!!${RST} Poetry environment installation failed"; return; } + poetry install --no-root $poetry_verbosity || { echo -e "${BIRed}!!!${RST} Poetry environment installation failed"; return; } echo -e "${BIGreen}>>>${RST} Cleaning cache files ..." clean_pyc @@ -172,6 +172,7 @@ main () { poetry run python -m pip install --upgrade pip poetry run pip install --force-reinstall setuptools poetry run pip install --force-reinstall wheel + poetry run python -m pip install --force-reinstall pip } main -3 diff --git a/tools/fetch_thirdparty_libs.ps1 b/tools/fetch_thirdparty_libs.ps1 new file mode 100644 index 0000000000..d1b914fac2 --- /dev/null +++ b/tools/fetch_thirdparty_libs.ps1 @@ -0,0 +1,20 @@ +<# +.SYNOPSIS + Download and extract third-party dependencies for OpenPype. + +.DESCRIPTION + This will download third-party dependencies specified in pyproject.toml + and extract them to vendor/bin folder. + +.EXAMPLE + +PS> .\fetch_thirdparty_libs.ps1 + +#> +$current_dir = Get-Location +$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent +$openpype_root = (Get-Item $script_dir).parent.FullName +Set-Location -Path $openpype_root + +& poetry run python "$($openpype_root)\tools\fetch_thirdparty_libs.py" +Set-Location -Path $current_dir diff --git a/tools/fetch_thirdparty_libs.py b/tools/fetch_thirdparty_libs.py new file mode 100644 index 0000000000..75ee052950 --- /dev/null +++ b/tools/fetch_thirdparty_libs.py @@ -0,0 +1,160 @@ +# -*- coding: utf-8 -*- +"""Fetch, verify and process third-party dependencies of OpenPype. + +Those should be defined in `pyproject.toml` in OpenPype sources root. + +""" +import os +import sys +import toml +import shutil +from pathlib import Path +from urllib.parse import urlparse +import requests +import enlighten +import platform +import blessed +import tempfile +import math +import hashlib +import tarfile +import zipfile +import time + + +term = blessed.Terminal() +manager = enlighten.get_manager() +hash_buffer_size = 65536 + + +def sha256_sum(filename: Path): + """Calculate sha256 hash for given file. + + Args: + filename (Path): path to file. + + Returns: + str: hex hash. 
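+
+    Example:
+        The value below is the sha256 of an empty file (illustrative only):
+
+        >>> sha256_sum(Path("empty.bin"))  # doctest: +SKIP
+        'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'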
+
+    """
+    _hash = hashlib.sha256()
+    with open(filename, 'rb', buffering=0) as f:
+        buffer = bytearray(128 * 1024)
+        mv = memoryview(buffer)
+        for n in iter(lambda: f.readinto(mv), 0):
+            _hash.update(mv[:n])
+    return _hash.hexdigest()
+
+
+def _print(msg: str, message_type: int = 0) -> None:
+    """Print message to console.
+
+    Args:
+        msg (str): message to print
+        message_type (int): type of message (0 info, 1 error, 2 note)
+
+    """
+    if message_type == 0:
+        header = term.aquamarine3(">>> ")
+    elif message_type == 1:
+        header = term.orangered2("!!! ")
+    elif message_type == 2:
+        header = term.tan1("... ")
+    else:
+        header = term.darkolivegreen3("--- ")
+
+    print("{}{}".format(header, msg))
+
+
+_print("Processing third-party dependencies ...")
+start_time = time.time_ns()
+openpype_root = Path(os.path.dirname(__file__)).parent
+pyproject = toml.load(openpype_root / "pyproject.toml")
+platform_name = platform.system().lower()
+
+try:
+    thirdparty = pyproject["openpype"]["thirdparty"]
+except KeyError:
+    _print("No third-party libraries specified in pyproject.toml", 1)
+    sys.exit(1)
+
+for k, v in thirdparty.items():
+    _print(f"processing {k}")
+    destination_path = openpype_root / "vendor" / "bin" / k / platform_name
+
+    # check the platform definition exists before reading its url
+    if not v.get(platform_name):
+        _print(("missing definition for current "
+                f"platform [ {platform_name} ]"), 1)
+        sys.exit(1)
+
+    url = v.get(platform_name).get("url")
+    parsed_url = urlparse(url)
+
+    # check if file is already extracted in /vendor/bin
+    if destination_path.exists():
+        _print("destination path already exists, deleting ...", 2)
+        if destination_path.is_dir():
+            try:
+                shutil.rmtree(destination_path)
+            except OSError as e:
+                _print("cannot delete folder.", 1)
+                raise SystemExit(e)
+
+    # download file
+    _print(f"Downloading {url} ...")
+    with tempfile.TemporaryDirectory() as temp_dir:
+        temp_file = Path(temp_dir) / Path(parsed_url.path).name
+
+        r = requests.get(url, stream=True)
+        content_len = int(r.headers.get('Content-Length', '0')) or None
+        with manager.counter(color='green',
+                             total=content_len and math.ceil(content_len / 2 ** 20),  # noqa: E501
+                             unit='MiB', leave=False) as counter:
+            with open(temp_file, 'wb', buffering=2 ** 24) as file_handle:
+                for chunk in r.iter_content(chunk_size=2 ** 20):
+                    file_handle.write(chunk)
+                    counter.update()
+
+        # verify downloaded file against the checksum from pyproject.toml
+        _print("Calculating sha256 ...", 2)
+        calc_checksum = sha256_sum(temp_file)
+        if v.get(platform_name).get("hash") != calc_checksum:
+            _print("Downloaded file's checksum is invalid.", 1)
+            sys.exit(1)
+
+        _print("File OK", 3)
+        if not destination_path.exists():
+            destination_path.mkdir(parents=True)
+
+        # extract to destination
+        archive_type = temp_file.suffix.lstrip(".")
+        _print(f"Extracting {archive_type} file to {destination_path}")
+        if archive_type in ['zip']:
+            zip_file = zipfile.ZipFile(temp_file)
+            zip_file.extractall(destination_path)
+            zip_file.close()
+
+        elif archive_type in [
+            'tar', 'tgz', 'tar.gz', 'tar.xz', 'tar.bz2'
+        ]:
+            if archive_type == 'tar':
+                tar_type = 'r:'
+            elif archive_type.endswith('xz'):
+                tar_type = 'r:xz'
+            elif archive_type.endswith('gz'):
+                tar_type = 'r:gz'
+            elif archive_type.endswith('bz2'):
+                tar_type = 'r:bz2'
+            else:
+                tar_type = 'r:*'
+            try:
+                tar_file = tarfile.open(temp_file, tar_type)
+            except tarfile.ReadError:
+                raise SystemExit("corrupted archive")
+            tar_file.extractall(destination_path)
+            tar_file.close()
+    _print("Extraction OK", 3)
+
+end_time = time.time_ns()
+total_time = (end_time - start_time) / 1000000000
+_print(f"Downloading and 
extracting took {total_time} secs.") diff --git a/tools/fetch_thirdparty_libs.sh b/tools/fetch_thirdparty_libs.sh new file mode 100755 index 0000000000..e305b4b3e4 --- /dev/null +++ b/tools/fetch_thirdparty_libs.sh @@ -0,0 +1,129 @@ +#!/usr/bin/env bash + +# Run Pype Tray + + +art () { + cat <<-EOF + ____________ + /\\ ___ \\ + \\ \\ \\/_\\ \\ + \\ \\ _____/ ______ ___ ___ ___ + \\ \\ \\___/ /\\ \\ \\ \\\\ \\\\ \\ + \\ \\____\\ \\ \\_____\\ \\__\\\\__\\\\__\\ + \\/____/ \\/_____/ . PYPE Club . + +EOF +} + +# Colors for terminal + +RST='\033[0m' # Text Reset + +# Regular Colors +Black='\033[0;30m' # Black +Red='\033[0;31m' # Red +Green='\033[0;32m' # Green +Yellow='\033[0;33m' # Yellow +Blue='\033[0;34m' # Blue +Purple='\033[0;35m' # Purple +Cyan='\033[0;36m' # Cyan +White='\033[0;37m' # White + +# Bold +BBlack='\033[1;30m' # Black +BRed='\033[1;31m' # Red +BGreen='\033[1;32m' # Green +BYellow='\033[1;33m' # Yellow +BBlue='\033[1;34m' # Blue +BPurple='\033[1;35m' # Purple +BCyan='\033[1;36m' # Cyan +BWhite='\033[1;37m' # White + +# Bold High Intensity +BIBlack='\033[1;90m' # Black +BIRed='\033[1;91m' # Red +BIGreen='\033[1;92m' # Green +BIYellow='\033[1;93m' # Yellow +BIBlue='\033[1;94m' # Blue +BIPurple='\033[1;95m' # Purple +BICyan='\033[1;96m' # Cyan +BIWhite='\033[1;97m' # White + + +############################################################################## +# Detect required version of python +# Globals: +# colors +# PYTHON +# Arguments: +# None +# Returns: +# None +############################################################################### +detect_python () { + echo -e "${BIGreen}>>>${RST} Using python \c" + local version_command="import sys;print('{0}.{1}'.format(sys.version_info[0], sys.version_info[1]))" + local python_version="$(python3 <<< ${version_command})" + oIFS="$IFS" + IFS=. + set -- $python_version + IFS="$oIFS" + if [ "$1" -ge "3" ] && [ "$2" -ge "6" ] ; then + if [ "$2" -gt "7" ] ; then + echo -e "${BIWhite}[${RST} ${BIRed}$1.$2 ${BIWhite}]${RST} - ${BIRed}FAILED${RST} ${BIYellow}Version is new and unsupported, use${RST} ${BIPurple}3.7.x${RST}"; return 1; + else + echo -e "${BIWhite}[${RST} ${BIGreen}$1.$2${RST} ${BIWhite}]${RST}" + fi + PYTHON="python3" + else + command -v python3 >/dev/null 2>&1 || { echo -e "${BIRed}$1.$2$ - ${BIRed}FAILED${RST} ${BIYellow}Version is old and unsupported${RST}"; return 1; } + fi +} + +############################################################################## +# Clean pyc files in specified directory +# Globals: +# None +# Arguments: +# Optional path to clean +# Returns: +# None +############################################################################### +clean_pyc () { + local path + path=$pype_root + echo -e "${BIGreen}>>>${RST} Cleaning pyc at [ ${BIWhite}$path${RST} ] ... \c" + find "$path" -regex '^.*\(__pycache__\|\.py[co]\)$' -delete + echo -e "${BIGreen}DONE${RST}" +} + +############################################################################## +# Return absolute path +# Globals: +# None +# Arguments: +# Path to resolve +# Returns: +# None +############################################################################### +realpath () { + echo $(cd $(dirname "$1"); pwd)/$(basename "$1") +} + +# Main +main () { + echo -e "${BGreen}" + art + echo -e "${RST}" + detect_python || return 1 + + # Directories + pype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}"))) + pushd "$pype_root" > /dev/null || return > /dev/null + + echo -e "${BIGreen}>>>${RST} Running Pype tool ..." 
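+  # Run through Poetry so the script can import its Python dependencies
+  # (toml, requests, enlighten, blessed) from the project's virtualenv.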
+  poetry run python3 "$pype_root/tools/fetch_thirdparty_libs.py"
+}
+
+main
\ No newline at end of file
diff --git a/tools/run_mongo.sh b/tools/run_mongo.sh
index 1c788abcaf..8c94fcf881 100755
--- a/tools/run_mongo.sh
+++ b/tools/run_mongo.sh
@@ -82,3 +82,4 @@ main () {
   echo -e "${BIGreen}>>>${RST} Detached to background."
 }
+main
diff --git a/vendor/deadline/custom/plugins/GlobalJobPreLoad.py b/vendor/deadline/custom/plugins/GlobalJobPreLoad.py
index d1287dd213..5e64605271 100644
--- a/vendor/deadline/custom/plugins/GlobalJobPreLoad.py
+++ b/vendor/deadline/custom/plugins/GlobalJobPreLoad.py
@@ -60,7 +60,7 @@ def inject_openpype_environment(deadlinePlugin):
     with open(export_url) as fp:
         contents = json.load(fp)
     for key, value in contents.items():
-        deadlinePlugin.SetEnvironmentVariable(key, value)
+        deadlinePlugin.SetProcessEnvironmentVariable(key, value)
     os.remove(export_url)
@@ -162,4 +162,3 @@ def __main__(deadlinePlugin):
         inject_openpype_environment(deadlinePlugin)
     else:
         pype(deadlinePlugin)  # backward compatibility with Pype2
-
diff --git a/website/docs/admin_distribute.md b/website/docs/admin_distribute.md
new file mode 100644
index 0000000000..b0ab71e2f3
--- /dev/null
+++ b/website/docs/admin_distribute.md
@@ -0,0 +1,50 @@
+---
+id: admin_distribute
+title: Distribute
+sidebar_label: Distribute
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+To let your artists use OpenPype, you'll need to distribute the frozen executables to them.
+
+Distribution consists of two parts:
+
+### 1. OpenPype Igniter
+
+This is the base application that will be installed locally on each workstation.
+It is self-contained (frozen) software that also includes all of the OpenPype codebase with the version
+from the time of the build.
+
+The Igniter package is around 500MB and preparing an updated version requires you to re-build OpenPype. That would be
+inconvenient for regular and quick distribution of production updates and fixes, so you can distribute those
+independently, without requiring your artists to re-install every time.
+
+### 2. OpenPype Codebase
+
+When you upgrade your studio OpenPype deployment to a new version or make any local code changes, you can distribute
+these changes to your artists, without the need of re-building OpenPype, by using the provided `create_zip` tool.
+The resulting zip needs to be made available to the artists and it will override their local OpenPype install
+with the updated version.
+
+You have two ways of making this happen:
+
+#### Automatic Updates
+
+Every time an artist launches OpenPype on their workstation, it will look into a pre-defined
+[OpenPype update location](#self) for any versions that are newer than the
+latest, locally installed version. If such a version is found, it will be downloaded,
+automatically extracted to the correct place and launched. This will become the default
+version to run for the artist, until a higher version is detected in the update location again.
+
+#### Manual Updates
+
+If for some reason you don't want to use the automatic updates, you can distribute your
+zips manually. Your artists will then have to unpack them to the correct place on their disk.
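+
+To make the automatic check concrete, here is a hedged sketch of the idea (not OpenPype's actual updater code): compare version numbers parsed from the items in the update location against what is already installed locally. The paths and the naming pattern below are assumptions for illustration only.
+
+```python
+import re
+from pathlib import Path
+
+# Hypothetical locations: a studio share and the local install directory.
+UPDATE_LOCATION = Path(r"\\studio\pipeline\openpype")
+LOCAL_VERSIONS = Path.home() / "AppData" / "Local" / "pypeclub" / "openpype"
+
+
+def find_versions(folder):
+    """Yield (major, minor, patch) tuples parsed from item names."""
+    pattern = re.compile(r"openpype-v?(\d+)\.(\d+)\.(\d+)", re.IGNORECASE)
+    for item in folder.iterdir():
+        match = pattern.search(item.name)
+        if match:
+            yield tuple(int(group) for group in match.groups())
+
+
+newest_remote = max(find_versions(UPDATE_LOCATION), default=None)
+newest_local = max(find_versions(LOCAL_VERSIONS), default=(0, 0, 0))
+if newest_remote and newest_remote > newest_local:
+    print("Newer version available: {}.{}.{}".format(*newest_remote))
+```
+
+Whether updates arrive automatically or manually, the versions end up in the artist's local install directory.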
+ +The default locations are: + +- Windows: `C:\Users\%USERNAME%\AppData\Local\pypeclub\openpype` +- Linux: ` ` +- Mac: ` ` diff --git a/website/docs/admin_docsexamples.md b/website/docs/admin_docsexamples.md new file mode 100644 index 0000000000..6b4ae101d3 --- /dev/null +++ b/website/docs/admin_docsexamples.md @@ -0,0 +1,119 @@ +--- +id: admin_docsexamples +title: Examples of using notes +sidebar_label: docsexamples +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + + + + + +This is your mac stuff + + + + +This is your linux stuff + + + + + +This is your mac stuff + + + + + + + + + +:::note Name of the category + + + + + +This is your mac stuff + + + + +This is your linux stuff + + + + + +This is your mac stuff + + + + +::: + + +========================= + +:::important + +- This is my note +- another list +- super list + +```python +import os +print(os.environ) +``` + +::: + +:::tip +This is my note +::: + +:::note +This is my note +::: + +:::warning +This is my note +::: + +:::caution +This is my note +::: + +export const Highlight = ({children, color}) => ( + + {children} + +); + +Docusaurus green and Facebook blue are my favorite colors. + +I can write **Markdown** alongside my _JSX_! diff --git a/website/docs/admin_hosts_blender.md b/website/docs/admin_hosts_blender.md new file mode 100644 index 0000000000..0655e5341a --- /dev/null +++ b/website/docs/admin_hosts_blender.md @@ -0,0 +1,83 @@ +--- +id: admin_hosts_blender +title: Blender +sidebar_label: Blender +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +## Blender requirements +Blender integration requires to use **PySide2** module inside blender. Module is different for Blender versions and platforms so can't be bundled with OpenPype. + +### How to install + +:::info Permissions +This step requires Admin persmission. +::: + + + + + +Find python executable inside your Blender installation folder. It is usually located in **C:\\Program Files\\Blender Foundation\\Blender {version}\\{version}\\python\\bin\\python.exe** (This may differ in future blender version). + +Open Powershell or Command Prompt as Administrator and run commands below. + +*Replace `C:\Program Files\Blender Foundation\Blender 2.83\2.83\python\bin` with your path.* + +```bash +# Change directory to python executable directory. +> cd C:\Program Files\Blender Foundation\Blender 2.83\2.83\python\bin + +# Run pip install command. +> python -m pip install PySide2 +``` + + + + + +Procedure may differ based on Linux distribution and blender distribution. Some Blender distributions are using system Python in that case it is required to install PySide2 using pip to system python (Not tested). + +**These instructions are for Blender using bundled python.** + +Find python executable inside your blender application. + +:::note Find python executable in Blender +You can launch Blender and in "Scripting" section enter commands to console. +```bash +>>> import bpy +>>> print(bpy.app.binary_path_python) +'/path/to/python/executable' +``` +::: + +Open terminal and run pip install command below. + +*Replace `/usr/bin/blender/2.83/python/bin/python3.7m` with your path.* +```bash +> /usr/bin/blender/2.83/python/bin/python3.7m -m pip install PySide2 +``` + +:::warning No module named pip +If you get error `No module named pip` you'll have to do few steps first. Open new terminal and run the python executable from Blender (entering full path). 
+```bash +# Run Python executable +> /usr/bin/blender/2.83/python/bin/python3.7m +# Python process should start +>>> import ensurepip +>>> ensurepip.bootstrap() +``` +You can close new terminal. Run pip install command above again. Now should work as expected. +::: + + + + diff --git a/website/docs/admin_openpype_commands.md b/website/docs/admin_openpype_commands.md new file mode 100644 index 0000000000..6e187c3c8a --- /dev/null +++ b/website/docs/admin_openpype_commands.md @@ -0,0 +1,140 @@ +--- +id: admin_openpype_commands +title: OpenPype Commands Reference +sidebar_label: OpenPype Commands +--- + + +## `tray` + +To launch Tray: +```sh +pype tray +``` + +### `--debug` + +To launch Tray with debugging information: +```sh +pype tray --debug +``` + +-------------------- + + +## `eventserver` + +This command launches ftrack event server. + +This should be ideally used by system service (such us systemd or upstart +on linux and window service). + +You have to set either proper environment variables to provide URL and +credentials or use option to specify them. If you use `--store_credentials` +provided credentials will be stored for later use. + +To run ftrack event server: +```sh +pype eventserver --ftrack-url= --ftrack-user= --ftrack-api-key= --ftrack-events-path= --no-stored-credentials --store-credentials +``` + + +### `--debug` +- print debug info + +### `--ftrack-url` +- URL to ftrack server + +### `--ftrack-user` +- user name to log in to ftrack + +### `--ftrack-api-key` +- ftrack api key + +### `--ftrack-events-path` +- path to event server plugins + +### `--no-stored-credentials` +- will use credential specified with options above + +### `--store-credentials` +- will store credentials to file for later use + +-------------------- + +## `launch` + +Launch application in Pype environment. + +### `--app` + +Application name - this should be the same as it's [defining toml](admin_hosts#launchers) file (without .toml) + +### `--project` +Project name + +### `--asset` +Asset name + +### `--task` +Task name + +### `--tools` +*Optional: Additional tools environment files to add* + +### `--user` +*Optional: User on behalf to run* + +### `--ftrack-server` / `-fs` +*Optional: Ftrack server URL* + +### `--ftrack-user` / `-fu` +*Optional: Ftrack user* + +### `--ftrack-key` / `-fk` +*Optional: Ftrack API key* + +For example to run Python interactive console in Pype context: +```sh +pype launch --app python --project my_project --asset my_asset --task my_task +``` + +-------------------- + + +## `publish` + +Pype takes JSON from provided path and use it to publish data in it. +```sh +pype publish +``` + +### `--debug` +- print more verbose infomation + +-------------------- + +## `extractenvironments` + +Extract environment variables for entered context to a json file. + +Entered output filepath will be created if does not exists. + +All context options must be passed otherwise only openpype's global environments will be extracted. 
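+
+The exported file is a flat JSON object mapping variable names to values. As a hedged sketch of how such a file can be consumed (the bundled Deadline `GlobalJobPreLoad.py` plugin does essentially this, using Deadline's `SetProcessEnvironmentVariable` instead of `os.environ`):
+
+```python
+import json
+import os
+
+# The "output_json_path" passed to `pype extractenvironments`; example path only.
+export_url = "/tmp/openpype_env.json"
+
+with open(export_url) as fp:
+    contents = json.load(fp)
+
+# Apply the extracted variables to the current process environment.
+for key, value in contents.items():
+    os.environ[key] = value
+```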
+
+Context options are "project", "asset", "task" and "app".
+
+### `output_json_path`
+- Absolute path to the exported json file
+
+### `--project`
+- Project name
+
+### `--asset`
+- Asset name
+
+### `--task`
+- Task name
+
+### `--app`
+- Application name
\ No newline at end of file
diff --git a/website/docs/admin_settings.md b/website/docs/admin_settings.md
new file mode 100644
index 0000000000..0cd10f81dc
--- /dev/null
+++ b/website/docs/admin_settings.md
@@ -0,0 +1,86 @@
+---
+id: admin_settings
+title: Working with settings
+sidebar_label: Working with settings
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+OpenPype stores all of its settings and configuration in the mongo database. To make the configuration as easy as possible, we provide a robust GUI where you can access and change everything that is configurable.
+
+The **Settings** GUI can be started from the tray menu.
+
+Please keep in mind that these settings are set up for the whole studio, not per individual. If you're looking for individual artist settings, head to the
+[Local Settings](#local-settings) section in the artist documentation.
+
+
+## Categories
+
+We use simple colour coding to show you any changes to the settings:
+- **Grey**: [OpenPype default](#openpype-defaults)
+- **Green**: [Studio default](#openpype-defaults)
+- **Orange**: [Project Override](#project-overrides)
+- **Blue**: Changed and unsaved value
+
+You'll find that settings are split into categories:
+
+### [System](admin_settings_system)
+
+The System section contains all settings that can be configured on a studio level, but cannot
+be changed on a per-project basis. These include mostly high-level options like the path to the
+mongo database, toggling major modules on and off and configuring studio-wide application
+availability.
+
+### [Project](admin_settings_project)
+
+The Project tab contains most of OpenPype's settings and all of them can be configured and overridden on a per-project basis if need be. This includes most of the workflow behaviors
+like what formats to export, naming conventions, publishing validations, automatic asset loaders and a lot more.
+
+We recommend keeping as many configurations as possible on a studio level and only overriding selectively, because micromanaging all of the project settings might become cumbersome down the line. Most of the settings can be safely adjusted and locked on a project
+after production has started.
+
+## Understanding Overrides
+
+Most of the individual settings can be set and overridden on multiple levels.
+
+### OpenPype defaults
+When you first open settings, all of the values and categories will be marked with
+light **grey labels** or a **grey vertical bar** on the left edge of the expandable category.
+
+That means the value has been left at the OpenPype default. If the default changes in future
+OpenPype versions, these values will reflect the change after you deploy the new version.
+
+### Studio defaults
+
+Any values that you change and then save with the button in the bottom right corner will be saved
+as studio defaults. This means they will stay at those values even if you update your OpenPype version.
+To make it clear which settings are set by you specifically, they are marked with a **green
+edge** and **green labels** once set.
+
+To set a studio default, just change the value in the system tab and press save.
If you want
+to keep the value but add it to your studio defaults to protect it from potential
+future updates, you can `right click` and choose `add to studio default`, then press save.
+
+In the Project settings tab, you need to select the **( Default )** project on the left to set your studio defaults for projects. The rest works the same as in the System tab.
+
+![studio_defaults](assets/settings/studio_defaults.gif)
+
+You can also reset any setting to the OpenPype default by doing `right click` and `remove from studio default`.
+
+![studio_defaults](assets/settings/studio_defaults_remove.gif)
+
+### Project Overrides
+
+Many settings are useful to adjust on a per-project basis. To identify project
+overrides, they are marked with an **orange edge** and **orange labels** in the settings GUI.
+
+To set project overrides, proceed the same way as with the studio defaults, but first select
+the particular project you want to configure on the left-hand side of the Project Settings tab.
+
+Here you can see all three overrides at the same time. Deadline has no studio changes at all, Maya has some studio defaults configured and Nuke also contains project-specific overrides.
+![colours_01](assets/settings/colours_02.png)
+
+Override colours work as breadcrumbs to allow quick identification of what was changed and where. As you can see in this image, the orange colour is propagated up the hierarchy even though only a single value (the sync render version with workfile toggle) was changed.
+
+![override_breadcumbs](assets/settings/override_breadcrumbs.png)
diff --git a/website/docs/admin_settings_project_anatomy.md b/website/docs/admin_settings_project_anatomy.md
new file mode 100644
index 0000000000..54023d468f
--- /dev/null
+++ b/website/docs/admin_settings_project_anatomy.md
@@ -0,0 +1,106 @@
+---
+id: admin_settings_project_anatomy
+title: Project Anatomy
+sidebar_label: Project Anatomy
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+
+Project Anatomy is the most important configuration piece for each project you work on with OpenPype.
+
+It defines:
+- Project Root folders
+- File naming conventions
+- Folder structure templates
+- Default project attributes
+- Task Types
+- Applications and Tool versions
+- Colour Management
+- File Formats
+
+Anatomy is the only configuration that is always saved as a project override. This is to make sure that any updates to OpenPype or studio default values don't affect currently running productions.
+
+![anatomy_01](assets/settings/anatomy_01.png)
+
+## Roots
+
+Roots define where files are stored, as paths to shared folders. It is required to set the root path for each platform you are using in the studio. All paths must point to the same folder!
+
+![roots01](assets/settings/anatomy_roots01.png)
+
+It is possible to set multiple roots when necessary. That may be handy when you need to store a specific type of data on another disk.
+![roots02](assets/settings/anatomy_roots02.png)
+
+
+Note how multiple roots are used here to push different types of files to different shared storage.
+![roots03](assets/settings/anatomy_roots03.png)
+
+
+## Templates
+
+Templates define the project's folder structure and filenames.
+
+We have a few required anatomy templates for OpenPype to work properly; however, we keep adding more when needed.
+
+### Available template keys
+
+
+

| Context key | Description |
| --- | --- |
| `root[name]` | Path to root folder |
| `project[name]` | Project's full name |
| `project[code]` | Project's code |
| `hierarchy` | All hierarchical parents as subfolders |
| `asset` | Name of asset or shot |
| `task` | Name of task |
| `version` | Version number |
| `subset` | Subset name |
| `family` | Main family name |
| `ext` | File extension |
| `representation` | Representation name |
| `frame` | Frame number for sequence files |
| `output` | |
| `comment` | |
+
+ +| Date-Time key | Example result | Description | +| --- | --- | --- | +| `d` | 1, 30 | Short day of month | +| `dd` | 01, 30 | Day of month with 2 digits. | +| `ddd` | Mon | Shortened week day name. | +| `dddd` | Monday | Full week day name. | +| `m` | 1, 12 | Short month number. | +| `mm` | 01, 12 | Month number with 2 digits. | +| `mmm` | Jan | Shortened month name. | +| `mmmm` | January | Full month name. | +| `yy` | 20 | Shortened year. | +| `yyyy` | 2020 | Full year. | +| `H` | 4, 17 | Shortened 24-hour number. | +| `HH` | 04, 17 | 24-hour number with 2 digits. | +| `h` | 5 | Shortened 12-hour number. | +| `hh` | 05 | 12-hour number with 2 digits. | +| `ht` | AM, PM | Midday part. | +| `M` | 0 | Shortened minutes number. | +| `MM` | 00 | Minutes number with 2 digits. | +| `S` | 0 | Shortened seconds number. | +| `SS` | 00 | Seconds number with 2 digits. | + +
+
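+
+Since anatomy templates are plain `{key}` placeholders, Python's `str.format_map` can illustrate how the keys from the tables above combine into a path. This is a minimal sketch with a made-up template and values, not OpenPype's actual template resolver:
+
+```python
+# Hypothetical publish template using context keys from the table above.
+template = (
+    "{root[work]}/{project[code]}/{hierarchy}/{asset}/publish/"
+    "{family}/{subset}/v{version:0>3}/"
+    "{project[code]}_{asset}_{subset}_v{version:0>3}.{ext}"
+)
+
+data = {
+    "root": {"work": "P:/projects"},  # a named root from the Roots section
+    "project": {"name": "Demo Project", "code": "demo"},
+    "hierarchy": "assets/characters",
+    "asset": "character1",
+    "family": "model",
+    "subset": "modelDefault",
+    "version": 1,
+    "ext": "abc",
+}
+
+print(template.format_map(data))
+# P:/projects/demo/assets/characters/character1/publish/model/modelDefault/v001/demo_character1_modelDefault_v001.abc
+```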
+
+

## Attributes



## Task Types


## Colour Management and Formats
\ No newline at end of file
diff --git a/website/docs/admin_settings_system.md b/website/docs/admin_settings_system.md
new file mode 100644
index 0000000000..0c4a6c863d
--- /dev/null
+++ b/website/docs/admin_settings_system.md
@@ -0,0 +1,132 @@
+---
+id: admin_settings_system
+title: System Settings
+sidebar_label: System settings
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+## Global
+
+Settings applicable to the whole studio.
+
+`Studio Name`
+
+`Studio Code`
+
+`Environment`
+
+## Modules
+
+Configuration of OpenPype modules. Some can only be turned on and off, others have
+their own attributes that need to be set before they become fully functional.
+
+### Avalon
+
+`Avalon Mongo Timeout` - You might need to change this if your mongo connection is a bit slow. Making the
+timeout longer will give Avalon a better chance to connect.
+
+`Thumbnail Storage Location` - Simple disk storage path where all thumbnails will be stored.
+
+### Ftrack
+
+`Server` - URL of your ftrack server.
+
+Additional action and event paths:
+
+`Action paths` - Directories containing your custom ftrack actions.
+
+`Event paths` - Directories containing your custom ftrack event plugins.
+
+`Intent` - Special ftrack attribute that marks the intention of individual publishes. This setting will be reflected
+in the publisher as well as in ftrack custom attributes.
+
+`Custom Attributes` - Write and read permissions for all ftrack custom attributes required by OpenPype. The values should be
+ftrack role names.
+
+### Sync Server
+
+Disable/enable the OpenPype site sync feature.
+
+### Standalone Publisher
+
+Disable/enable the Standalone Publisher option.
+
+### Deadline
+
+`Deadline Rest URL` - URL to the Deadline web service. This URL must be reachable from every
+workstation that should be submitting render jobs to Deadline via OpenPype.
+
+### Muster
+
+`Muster Rest URL` - URL to the Muster web service. This URL must be reachable from every
+workstation that should be submitting render jobs to Muster via OpenPype.
+
+`templates mapping` - You can customize Muster templates to match your existing setup here.
+
+### Clockify
+
+`Workspace Name` - Name of the Clockify workspace where you would like to be sending all the timelogs.
+
+### Timers Manager
+
+`Max Idle Time` - Duration (in minutes) of inactivity after which the currently running timer will be stopped.
+
+`Dialog popup time` - Time in minutes before the end of Max Idle Time when a notification will alert
+the user that their timer is about to be stopped.
+
+### Idle Manager
+
+Service monitoring the activity, which triggers the Timers Manager timeouts.
+
+### Logging
+
+Module that allows storing all logging into the database for easier retrieval and support.
+
+## Applications
+
+In this section you can manage which applications are available to your studio, the locations of their
+executables and their additional environments.
+
+Each DCC is made of two levels.
+1. **Application group** - This is the main name of the application and you can define extra environments
+that are applicable to all versions of the given application. For example, any extra Maya scripts that are not
+version-dependent can be added to the `Maya` environment here.
+2. **Application versions** - Here you can define executables (per platform) for each supported version of
+the DCC and any default arguments (`--nukex` for instance). You can also further extend its environment.
![settings_applications](assets/settings/applications_01.png)
+
+Please keep in mind that the environments are not additive by default, so if you are extending variables like
+`PYTHONPATH` or `PATH`, make sure you add them to the end of the list.
+
+For instance:
+
+```json
+{
+    "PYTHONPATH": [
+        "my/path/to/python/scripts",
+        "{PYTHONPATH}"
+    ]
+}
+```
+
+
+
+## Tools
+
+A tool in OpenPype is anything that needs to be selectively added to your DCC applications. Most often these are plugins, modules, extensions or similar, depending on what your package happens to call it.
+
+OpenPype comes with some major CG renderers pre-configured as an example, but these and any others will need to be changed to match your particular environment.
+
+Their environment settings are split into two levels, just like applications, to allow more flexibility when setting them up.
+
+In the image below you can see that we set most of the environment variables on the general MTOA level, and only specify the version variable in the individual versions below. Because all environments within OpenPype settings resolve any cross-references, this is enough to get fully dynamic plugin loading, as long as the folder structure where you store the plugins is nicely organized.
+
+
+In this example MTOA will automatically fill the `MAYA_VERSION` (which is set by the Maya application environment) and `MTOA_VERSION` into the `MTOA` variable. We then use `MTOA` to set all the other variables needed for it to function within Maya.
+![tools](assets/settings/tools_01.png)
+
+All of the tools defined here can then be assigned to projects. You can also change the tool versions on any project level, all the way down to individual asset or shot overrides. So if you just need to upgrade your render plugin for a single shot, without risking incompatibilities on the rest of the project, it is possible.
\ No newline at end of file
diff --git a/website/docs/admin_use.md b/website/docs/admin_use.md
new file mode 100644
index 0000000000..376e9397a1
--- /dev/null
+++ b/website/docs/admin_use.md
@@ -0,0 +1,37 @@
+---
+id: admin_use
+title: Install and Run
+sidebar_label: Install & Run
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+
+## Install
+
+You can install OpenPype on individual workstations the same way as any other software.
+When you create your build, you will end up with an installation package for the platform
+that was used for the build.
+
+- Windows: `OpenPype-3.0.0.msi`
+- Linux: `OpenPype-3.0.0.zip`
+- Mac: `OpenPype-3.0.0.dmg`
+
+After OpenPype is installed, it will offer the user further installation if it detects a
+newer version in the studio update location.
+
+## Run OpenPype
+
+To use OpenPype on a workstation, simply run the executable that was installed.
+On the first run the user will be prompted for the OpenPype Mongo URL.
+This piece of information needs to be provided to the artist by the admin setting
+up OpenPype in the studio.
+
+Once the artist enters the Mongo URL address, OpenPype will remember the connection for the
+next launch, so it is a one-time process. From that moment OpenPype will do its best to
+always keep up to date with the latest studio updates.
+
+If the launch was successful, the artist should see a green OpenPype logo in their
+tray menu. Keep in mind that on Windows this icon might be hidden by default, in which case
+the artist can simply drag the icon down to the tray.
\ No newline at end of file diff --git a/website/docs/api.md b/website/docs/api.md new file mode 100644 index 0000000000..7cad92d603 --- /dev/null +++ b/website/docs/api.md @@ -0,0 +1,7 @@ +--- +id: api +title: Pype API +sidebar_label: API +--- + +Work in progress diff --git a/website/docs/artist_concepts.md b/website/docs/artist_concepts.md new file mode 100644 index 0000000000..6046ba6214 --- /dev/null +++ b/website/docs/artist_concepts.md @@ -0,0 +1,53 @@ +--- +id: artist_concepts +title: Key concepts +sidebar_label: Key Concepts +--- + +## Glossary + +### Asset + +In our pipeline all the main entities the project is made from are internally considered *'Assets'*. Episode, sequence, shot, character, prop, etc. All of these behave identically in the pipeline. Asset names need to be absolutely unique within the project because they are their key identifier. + +### Subset + +Usually, an asset needs to be created in multiple *'flavours'*. A character might have multiple different looks, model needs to be published in different resolutions, a standard animation rig might not be useable in a crowd system and so on. 'Subsets' are here to accommodate all this variety that might be needed within a single asset. A model might have subset: *'main'*, *'proxy'*, *'sculpt'*, while data of *'look'* family could have subsets *'main'*, *'dirty'*, *'damaged'*. Subsets have some recommendations for their names, but ultimately it's up to the artist to use them for separation of publishes when needed. + +### Version + +A numbered iteration of a given subset. Each version contains at least one [representation][daa74ebf]. + + [daa74ebf]: #representation "representation" + +### Representation + +Each published variant can come out of the software in multiple representations. All of them hold exactly the same data, but in different formats. A model, for example, might be saved as `.OBJ`, Alembic, Maya geometry or as all of them, to be ready for pickup in any other applications supporting these formats. + +### Family + +Each published [subset][3b89d8e0] can have exactly one family assigned to it. Family determines the type of data that the subset holds. Family doesn't dictate the file type, but can enforce certain technical specifications. For example OpenPype default configuration expects `model` family to only contain geometry without any shaders or joins when it is published. + + + [3b89d8e0]: #subset "subset" + + + +### Host + +General term for Software or Application supported by OpenPype and Avalon. These are usually DCC applications like Maya, Houdini or Nuke, but can also be a web based service like Ftrack or Clockify. + + +### Tool + +Small piece of software usually dedicated to a particular purpose. Most of OpenPype and Avalon tools have GUI, but some are command line only + + +### Publish + +Process of exporting data from your work scene to versioned, immutable file that can be used by other artists in the studio. + +### Load + +Process of importing previously published subsets into your current scene, using any of the OpenPype tools. 
+Loading asset using proper tools will ensure that all your scene content stays version controlled and updatable at a later point diff --git a/website/docs/artist_ftrack.md b/website/docs/artist_ftrack.md new file mode 100644 index 0000000000..2210615160 --- /dev/null +++ b/website/docs/artist_ftrack.md @@ -0,0 +1,134 @@ +--- +id: artist_ftrack +title: Ftrack +sidebar_label: Artist +--- + +# How to use Ftrack in OpenPype + +## Login to Ftrack module in OpenPype (best case scenario) +1. Launch OpenPype and go to systray OpenPype icon. +2. *Ftrack login* window pop up on start or press **login** in **Ftrack menu** to pop up *Ftrack login* window + + ![ftrack-login-2](assets/ftrack/ftrack-login_50.png) + + - Press `Ftrack` button + + ![Login widget](assets/ftrack/ftrack-login_1.png) + - Web browser opens + + - Sign into Ftrack if requested. If you are already signed in to Ftrack via web browser, you can jump to [Application launch](#application-launch-best-case-scenario) + + ![ftrack-login-2](assets/ftrack/ftrack-login_2.png) + +3. Message is shown + +![ftrack-login-3](assets/ftrack/ftrack-login_3.png) + +4. Close message and you're ready to use actions - continue with [Application launch](#application-launch-best-case-scenario) + + +--- + +## Application launch (best case scenario) +1. Make sure OpenPype is running and you passed [Login to Ftrack](#login-to-ftrack-module-in-openpype-best-case-scenario) guide + +2. Open Web browser and go to your studio Ftrack web page *(e.g. https://mystudio.ftrackapp.com/)* + +3. Locate the task to run the application on. + +4. Display actions for the task + ![ftrack-login-3](assets/ftrack/ftrack-login_60.png) + +5. Select application you want to launch + - application versions may be grouped to one action. In that case, press the action to reveal versions to choose from *(like Maya in the picture)*, only applications permitted on the particular project will appear. + ![ftrack-login-3](assets/ftrack/ftrack-login_71-small.png) + +6. Start working ;) + +--- + +## Change Ftrack user +1. Log out the previous user from Ftrack Web app *(skip if new is already logged)* + + ![ftrack-login-3](assets/ftrack/ftrack-login_80-small.png) + +2. Log out the previous user from Ftrack module in OpenPype tray + +
+
+ +![ftrack-login-3](assets/ftrack/ftrack_logout.gif) + +
+
+ +![ftrack-login-3](assets/ftrack/ftrack-login_81.png) + +
+
+ +

+ +3. Follow [Login to Ftrack](#login-to-ftrack-module-in-openpype-best-case-scenario) guide + +--- + +## Where to find API key +- Your API key can be found in Ftrack. In the upper right corner of Ftrack click on the avatar, choose System settings. +- You shouldn't need to use your personal API key if previous steps went through correctly + + ![ftrack-api](assets/ftrack/ftrack-api.png) + +- Scroll down in left panel and select `API keys`. Then pick `Create` button. + + ![ftrack-api](assets/ftrack/ftrack-api2.png) + +- New window will pop up. Choose the `API role` and press `Save` + + ![ftrack-api](assets/ftrack/ftrack-api3.png) + +- Then your new API will be created. + + ![ftrack-api](assets/ftrack/ftrack-api4.png) + +- Copy them and put it into the Ftrack login window. + + ![ftrack-api](assets/ftrack/ftrack-login-api.png) + + +--- +## What if... + +### Ftrack login window didn't pop up and Ftrack menu is not in tray +**1. possibility - OpenPype didn't load properly** +- try to restart OpenPype + +**2. possibility - Ftrack is not set in OpenPype** +- inform your administrator or supervisor + + +### Web browser did not open +**1. possibility - button was not pressed** +- Try to press again the `Ftrack` button in *Ftrack login* window + +**2. possibility - Ftrack URL is not set or is not right** +- Check **Ftrack URL** value in *Ftrack login* window +- Inform your administrator if URL is incorrect and launch OpenPype again when administrator fix it + +**3. possibility - Ftrack Web app can't be reached the way OpenPype use it** +- Enter your **Username** and [API key](#where-to-find-api-key) in *Ftrack login* window and press **Login** button + + ![ftrack-api](assets/ftrack/ftrack-api.gif) +### Ftrack action menu is empty +**1. possibility - OpenPype is not running** +- launch OpenPype and check if it is running in systray + +**2. possibility - You didn't go through Login to Ftrack guide** +- please go through [Login to Ftrack](#login-to-ftrack-module-in-openpype-best-case-scenario) guide + +**3. possibility - User logged to Ftrack Web is not the same as user logged to Ftrack module in tray** +- Follow [Change user](#change-ftrack-user) guide + +**4. possibility - Project doesn't have applications set correctly** +- ask your Project Manager to check if he set applications for the project diff --git a/website/docs/artist_getting_started.md b/website/docs/artist_getting_started.md new file mode 100644 index 0000000000..be1960a38c --- /dev/null +++ b/website/docs/artist_getting_started.md @@ -0,0 +1,84 @@ +--- +title: Getting started with OpenPype +sidebar_label: Getting started +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + + +## Working in the studio + +In studio environment you should have OpenPype already installed and deployed, so you can start using it without much setup. Your admin has probably put OpenPype icon on your desktop or even had your computer set up so OpenPype will start automatically. + +If this is not the case, please contact your administrator to consult on how to launch OpenPype in your studio + +## Working from home + +If you are working from home though, you'll need to install it yourself. You should, however, receive the OpenPype installer files from your studio +admin, supervisor or production, because OpenPype versions and executables might not be compatible between studios. + +To install OpenPype you just need to unzip it anywhere on the disk + +To use it, you have two options + +**openpype_gui.exe** is the most common for artists. 
It runs the OpenPype GUI in the system tray. From there you can run all the available tools. To use any of the features, OpenPype must be running in the tray.
+
+**openpype_console.exe** is useful for debugging and error reporting. It opens a console window where all the necessary information will appear during the user's work.
+
+
+
+
+
+WIP - Windows instructions once installers are finished
+
+
+
+WIP - Linux instructions once installers are finished
+
+
+
+WIP - Mac instructions once installers are finished
+
+
+
+
+## First Launch
+
+
+When you first start OpenPype, you will be asked to give it some basic information.
+### MongoDB
+
+In most cases that will only be your studio's MongoDB address.
+
+It is a URL that you should receive from your studio and it will most often look like `mongodb://username:password@mongo.mystudiodomain.com:12345` or `mongodb://192.168.100.15:27071`; it really depends on your studio setup. When OpenPype Igniter
+asks for it, just put it in the corresponding text field and press the `install` button.
+
+### OpenPype Version Repository
+
+Sometimes your studio might also ask you to fill in the path to its version
+repository. This is the location OpenPype will look into when checking
+if it's up to date and where updates are installed from automatically.
+
+This path is usually taken from the database directly, so you shouldn't need it.
+
+
+## Updates
+
+If you're connected to your studio, OpenPype will check for and install updates automatically every time you run it. That's why during the first start it will go through a quick update installation process, even though you might have just installed it.
+
+
+## Advanced use
+
+For more advanced use of OpenPype commands please visit the [Admin section](admin_openpype_commands).
diff --git a/website/docs/artist_hosts.md b/website/docs/artist_hosts.md
new file mode 100644
index 0000000000..609f6d97c8
--- /dev/null
+++ b/website/docs/artist_hosts.md
@@ -0,0 +1,17 @@
+---
+id: artist_hosts
+title: Hosts
+sidebar_label: Hosts
+---
+
+## Maya
+
+## Houdini
+
+## Nuke
+
+## Fusion
+
+## Unreal
+
+## System
diff --git a/website/docs/artist_hosts_aftereffects.md b/website/docs/artist_hosts_aftereffects.md
new file mode 100644
index 0000000000..879c0d4646
--- /dev/null
+++ b/website/docs/artist_hosts_aftereffects.md
@@ -0,0 +1,107 @@
+---
+id: artist_hosts_aftereffects
+title: AfterEffects
+sidebar_label: AfterEffects
+---
+
+## Available Tools
+
+- [Work Files](artist_tools.md#workfiles)
+- [Create](artist_tools.md#creator)
+- [Load](artist_tools.md#loader)
+- [Publish](artist_tools.md#publisher)
+- [Manage](artist_tools.md#inventory)
+- [Subset Manager](artist_tools.md#subset-manager)
+
+## Setup
+
+To install the extension, download and install [Anastasiy's Extension Manager](https://install.anastasiy.com/). Open Anastasiy's Extension Manager and select AfterEffects in the menu. Then go to `{path to pype}/repos/avalon-core/avalon/aftereffects/extension.zxp`.
+
+Drag `extension.zxp` and drop it onto Anastasiy's Extension Manager. The extension will install itself.
+
+## Implemented functionality
+
+The AfterEffects implementation currently allows you to import and add various media to a composition (image plates, renders, audio files, video files etc.)
+and send a prepared composition for rendering to Deadline.
+
+## Usage
+
+When you launch AfterEffects you will be met with the Workfiles app.
+ +Workfiles tools takes care of saving your .AEP files in the correct location and under +a correct name. You should use it instead of standard file saving dialog. + +In AfterEffects you'll find the tools in the `OpenPype` extension: + +![Extension](assets/photoshop_extension.PNG) + +You can show the extension panel by going to `Window` > `Extensions` > `OpenPype`. + +### Create + +When you have created an composition you want to publish, you will need to tag existing composition. To do this open the `Creator` through the extensions `Create` button. + +![Creator](assets/aftereffects_creator.png) + +Because of current rendering limitations, it is expected that only single composition will be marked for publishing! + +After Creator is successfully triggered on selected composition, it will be marked with an icon and its color +will be changed. + +![Higlights](assets/aftereffects_creator_after.png) + +### Publish + +When you are ready to share your work, you will need to publish it. This is done by opening the `Publish` by clicking the corresponding button in the OpenPype Panel. + +![Publish](assets/aftereffects_publish.png) + +This tool will run through checks to make sure the contents you are publishing is correct. Hit the "Play" button to start publishing. + +You may encounter issues with publishing which will be indicated with red squares. If these issues are within the validation section, then you can fix the issue. If there are issues outside of validation section, please let the OpenPype team know. For More details have a look at the general [Publish](artist_tools.md#publisher) documentation. + +### Load + +When you want to load existing published work, you can use the `Loader` tool. You can reach it in the extension's panel. + +![Loader](assets/photoshop_loader.PNG) + +The supported families for loading into AfterEffects are: + +- `image` +- `plate` +- `render` +- `prerender` +- `review` +- `audio` +- `background` `(set of images sorted in predefined order)` + +To load an item, right-click on the subset you want and choose a representation you want to load: + +![Loader](assets/photoshop_loader_load.gif) + +### Manage + +Now that we have some content loaded, you can manage which version is loaded. This is done through the `Scene Manager`. You can reach it through the extension's `Manage` button. + +:::note +Loaded images have to stay as smart layers in order to be updated. If you rasterize the layer, you can no longer update it to a different version using OpenPype tools. +::: + +![Loader](assets/photoshop_manage.PNG) + +You can switch to a previous version of the image or update to the latest. + +![Loader](assets/photoshop_manage_switch.gif) +![Loader](assets/photoshop_manage_update.gif) + +### Subset Manager + +![subset_manager](assets/tools_subset_manager.png) + +All created compositions will be shown in a simple list. If user decides, that this composition shouldn't be +published after all, right click on that item in the list and select 'Remove instance' + +Removing composition direclty in the AE would result to worfile contain phantom metadata which could result in +errors during publishing! 
\ No newline at end of file diff --git a/website/docs/artist_hosts_blender.md b/website/docs/artist_hosts_blender.md new file mode 100644 index 0000000000..877e99bff4 --- /dev/null +++ b/website/docs/artist_hosts_blender.md @@ -0,0 +1,226 @@ +--- +id: artist_hosts_blender +title: Blender +sidebar_label: Blender +--- + +## OpenPype global tools + +- [Set Context](artist_tools.md#set-context) +- [Work Files](artist_tools.md#workfiles) +- [Create](artist_tools.md#creator) +- [Load](artist_tools.md#loader) +- [Manage (Inventory)](artist_tools.md#inventory) +- [Publish](artist_tools.md#publisher) +- [Library Loader](artist_tools.md#library-loader) + +## Working with OpenPype in Blender + +OpenPype is here to ease you the burden of working on project with lots of +collaborators, worrying about naming, setting stuff, browsing through endless +directories, loading and exporting and so on. To achieve that, OpenPype is using +concept of being _"data driven"_. This means that what happens when publishing +is influenced by data in scene. This can by slightly confusing so let's get to +it with few examples. + + +## Setting scene data + +Blender settings concerning framerate, resolution and frame range are handled +by OpenPype. If set correctly in Ftrack, Blender will automatically set the +values for you. + + +## Publishing models + +### Intro + +Publishing models in Blender is pretty straightforward. Create your model as you +need. You might need to adhere to specifications of your studio that can be different +between studios and projects but by default your geometry does not need any +other convention. + +![Model example](assets/blender-model_example.jpg) + +### Creating instance + +Now create **Model instance** from it to let OpenPype know what in the scene you want to +publish. Go **OpenPype β†’ Create... β†’ Model**. + +![Model create instance](assets/blender-model_create_instance.jpg) + +`Asset` field is a name of asset you are working on - it should be already filled +with correct name as you've started Blender or switched context to specific asset. You +can edit that field to change it to different asset (but that one must already exists). + +`Subset` field is a name you can decide on. It should describe what kind of data you +have in the model. For example, you can name it `Proxy` to indicate that this is +low resolution stuff. See [Subset](artist_concepts#subset). + + + +Read-only field just under it show final subset name, adding subset field to +name of the group you have selected. + +`Use selection` checkbox will use whatever you have selected in Outliner to be +wrapped in Model instance. This is usually what you want. Click on **Create** button. + +You'll notice then after you've created new Model instance, there is a new +collection in Outliner called after your asset and subset, in our case it is +`character1_modelDefault`. The assets selected when creating the Model instance +are linked in the new collection. + +And that's it, you have your first model ready to publish. + +Now save your scene (if you didn't do it already). You will notice that path +in Save dialog is already set to place where scenes related to modeling task on +your asset should reside. As in our case we are working on asset called +**character1** and on task **modeling**, path relative to your project directory will be +`project_XY/assets/character1/work/modeling`. The default name for the file will +be `project_XY_asset_task_version`, so in our case +`simonetest_character1_modeling_v001.blend`. Let's save it. 
![Model create instance](assets/blender-save_modelling_file.jpg)
+
+### Publishing models
+
+Now let's publish it. Go **OpenPype β†’ Publish...**. You will be presented with the following window:
+
+![Model publish](assets/blender-model_pre_publish.jpg)
+
+Note that the content of this window can differ depending on your pipeline configuration.
+For more detail see [Publisher](artist_tools#publisher).
+
+Items in the left column are instances you will be publishing. You can disable them
+by clicking on the square next to them. A white filled square indicates they are ready for
+publishing, red means something went wrong either during the collection phase
+or the publishing phase. An empty one with gray text is disabled.
+
+See that in this case we are publishing from the scene file
+`simonetest_character1_modeling_v001.blend` the Blender model named
+`character1_modelDefault`.
+
+The right column lists all tasks that are run during the collection, validation,
+extraction and integration phases. White items are optional and you can disable
+them by clicking on them.
+
+Let's do a dry run of publishing to see if we pass all validators. Click on the flask
+icon at the bottom. Validators are run. Ideally you will end up with everything
+green in the validator section.
+
+### Fixing problems
+
+For the sake of demonstration, I intentionally kept the model in Edit Mode to
+trigger the validator designed to check just this.
+
+![Failed Model Validator](assets/blender-model_publish_error.jpg)
+
+You can see our model is now marked red in the left column and on the right we have
+a red box next to the `Mesh is in Object Mode` validator.
+
+You can click on the arrow next to it to see more details:
+
+![Failed Model Validator details](assets/blender-model_error_details.jpg)
+
+From there you can see in the **Records** entry that there is a problem with the
+object `Suzanne`. Some validators have an option to fix the problem for you or just
+select the objects that cause trouble. This is the case with our failed validator.
+
+In the main overview you can notice a little A in a circle next to the validator
+name. Right click on it and you can see the menu item `select invalid`. This
+will select the offending object in Blender.
+
+The fix is easy. Without closing the Publisher window, we just switch back to Object Mode.
+Then we need to reset the Publisher to make it notice the changes we've made. Click on the arrow
+circle button at the bottom and it will reset the Publisher to its initial state. Run
+the validators again (flask icon) to see if everything is OK.
+
+It should be OK now. Write a comment if you want and click the play icon button
+when ready.
+
+The publish process will now take its course. Depending on the data you are publishing,
+it can take a while. You should end up with everything green and the message
+**Finished successfully ...** You can now close the publisher window.
+
+To check for yourself that the model is published, open the
+[Asset Loader](artist_tools#loader) - **OpenPype β†’ Load...**.
+There you should see your model, named `modelDefault`.
+
+### Loading models
+
+You can load a model with the [Loader](artist_tools.md#loader). Go **OpenPype β†’ Load...**,
+select your model, right click on it and click **Link model (blend)**.
+
+## Creating Rigs
+
+Creating and publishing rigs with OpenPype follows a similar workflow as with
+other data types. Create your rig and mark parts of your hierarchy in sets to
+help OpenPype validators and extractors check and publish it.
+
+### Preparing rig for publish
+
+When creating rigs in Blender, it is important to keep a specific structure for
+the bones and the geometry.
+Let's first create a model and its rig. For demonstration, I'll create a
+simple model for a robotic arm made of simple boxes.
+
+![Blender - Simple model for rigging](assets/blender-rig_model_setup.jpg)
+
+I have now created the armature `RIG_RobotArm`. While the naming is not important
+(just adhere to your naming conventions), the hierarchy is. Once the models
+are skinned to the armature, the geometry must be organized in a separate Collection.
+In this case, I have the armature in the main Collection, and the geometry in
+the `Geometry` Collection.
+
+![Blender - Rig Hierarchy Example](assets/blender-rig_hierarchy_example.jpg)
+
+When you've prepared your hierarchy, it's time to create a *Rig instance* in OpenPype.
+Select your whole rig hierarchy and go **OpenPype β†’ Create...**. Select **Rig**.
+
+![Blender - Rig Hierarchy Example](assets/blender-rig_create.jpg)
+
+A new collection named after the selected Asset and Subset should have been created.
+In our case, it is `character1_rigDefault`. All the selected armatures and models
+have been linked into this new collection. You should end up with something like
+this:
+
+![Blender - Rig Hierarchy Example](assets/blender-rig_hierarchy_before_publish.jpg)
+
+### Publishing rigs
+
+Publishing a rig is done in the same way as publishing everything else. Save your scene
+and go **OpenPype β†’ Publish**. For more detail see [Publisher](artist_tools#publisher).
+
+### Loading rigs
+
+You can load a rig with the [Loader](artist_tools.md#loader). Go **OpenPype β†’ Load...**,
+select your rig, right click on it and click **Link rig (blend)**.
+
+## Layouts in Blender
+
+A layout is a set of elements that populate a scene. OpenPype allows you to version
+and manage those sets.
+
+### Publishing a layout
+
+Working with layouts is easy. Just load your assets into the scene with the
+[Loader](artist_tools.md#loader) (**OpenPype β†’ Load...**). Populate your scene as
+you wish and translate each piece to fit your needs. When ready, select all the imported
+assets and go **OpenPype β†’ Create...** and select **Layout**. When selecting rigs,
+you need to select only the armature; the geometry will automatically be included.
+This will create a set containing your selection and mark it for publishing.
+
+Now you can publish it with **OpenPype β†’ Publish**.
+
+### Loading layouts
+
+You can load a Layout using the [Loader](artist_tools.md#loader)
+(**OpenPype β†’ Load...**). Select your layout, right click on it and
+select **Link Layout (blend)**. This will populate your scene with all the
+models you've put into the layout.
\ No newline at end of file
diff --git a/website/docs/artist_hosts_harmony.md b/website/docs/artist_hosts_harmony.md
new file mode 100644
index 0000000000..3e945a2852
--- /dev/null
+++ b/website/docs/artist_hosts_harmony.md
@@ -0,0 +1,128 @@
+---
+id: artist_hosts_harmony
+title: Harmony
+sidebar_label: Harmony
+---
+
+## Available Tools
+
+- [Work Files](artist_tools.md#workfiles)
+- [Create](artist_tools.md#creator)
+- [Load](artist_tools.md#loader)
+- [Publish](artist_tools.md#publisher)
+- [Manage](artist_tools.md#inventory)
+
+:::note
+Only one tool can be open at a time. If you open a tool while another tool is open, it will wait in a queue until the existing tool is closed. Once the existing tool is closed, the new tool will open.
+:::
+
+## Usage
+
+The integration creates an `OpenPype` menu entry where all related tools are located.
+
+:::note
+Menu creation can be temperamental.
+It's best to start Harmony and do nothing else until the application is fully launched.
+If you don't see the `OpenPype` menu, follow these steps to create it:
+- Go to the `Script Editor`
+- Find the script called `TB_sceneOpened.js` and run it.
+- Choose the `start` method to run.
+:::
+
+### Workfiles
+
+`OpenPype > Workfiles`
+
+Work files are temporarily stored locally, in `[user]/.avalon/harmony`, to reduce network bandwidth. When saving the Harmony scene, a background process ensures the network files are updated.
+
+:::important
+Because the saving to the network location happens in the background, be careful when quickly saving and closing Harmony (and the terminal window), since an interrupted save to the network location can corrupt the workfile. To be sure the workfile is saved to the network location, look in the terminal for a line similar to this:
+
+`DEBUG:avalon.harmony.lib:Saved "[Local Scene Directory]" to "[Network Scene Directory]\[Name Of Workfile].zip"`
+:::
+
+### Create
+
+`OpenPype > Create`
+
+![Creator](assets/harmony_creator.PNG)
+
+These are the families supported in Harmony:
+
+- `Render`
+  - This instance is for generating a render and review. This is a normal write node, but only PNGs are supported at the moment.
+- `Template`
+  - This instance is for generating a template. This is a normal composite node, to which you can connect any number of nodes.
+  - Any connected nodes will be published along with their dependencies and any backdrops.
+- `Palette`
+  - Palettes are indirectly supported in Harmony. This means you just have to have palettes in your scene to publish them.
+
+When you `Use selection` on creation, the last selected node will be connected to the created node.
+
+### Publish
+
+`OpenPype > Publish`
+
+![Publish](assets/photoshop_publish.PNG)
+
+This tool will run through checks to make sure the contents you are publishing are correct. Hit the "Play" button to start publishing.
+
+You may encounter issues with publishing, which will be indicated with red squares. If these issues are within the validation section, then you can fix them. If there are issues outside of the validation section, please let the OpenPype team know.
+
+#### Repair Validation Issues
+
+All validators will give some description of what the issue is. You can inspect this by going into the validator through the arrow:
+
+![Inspect](assets/photoshop_publish_inspect.PNG)
+
+You can expand the errors by clicking on them for more details:
+
+![Expand](assets/photoshop_publish_expand.PNG)
+
+Some validators have repair actions, which will fix the issue. You can identify validators with actions by the circle icon with an "A":
+
+![Actions](assets/photoshop_publish_actions.PNG)
+
+To access the actions, right click on the validator. If an action runs successfully, the actions icon will turn green. Once all issues are fixed, you can just hit the "Refresh" button and try to publish again.
+
+![Repair](assets/photoshop_publish_repair.gif)
+
+### Load
+
+`OpenPype > Load`
+
+![Loader](assets/photoshop_loader.PNG)
+
+The supported families for Harmony are:
+
+- `image`
+- `harmony.template`
+  - Only import is currently supported for templates.
+- `harmony.palette`
+  - Loaded palettes are moved to the top of the colour stack, so they will act as overrides. Imported palettes are left in the scene.
+- `workfile`
+  - Only of type `zip`.
+
+To load, right-click on the subset you want and choose a representation:
+
+![Loader](assets/photoshop_loader_load.gif)
+
+:::note
+Loading templates or workfiles will import the contents into the scene. Referencing is not supported at the moment, so you will have to load newer versions into the scene.
+:::
+
+### Manage
+
+`OpenPype > Manage`
+
+![Loader](assets/photoshop_manage.PNG)
+
+You can switch to a previous version of the image or update to the latest.
+
+![Loader](assets/photoshop_manage_switch.gif)
+![Loader](assets/photoshop_manage_update.gif)
+
+:::note
+Images and image sequences will be loaded into the scene as read nodes and coloured green. On startup the pipeline checks for any outdated read nodes and colours them red.
+- Green = up-to-date version in the scene.
+- Red = outdated version in the scene.
+:::
diff --git a/website/docs/artist_hosts_hiero.md b/website/docs/artist_hosts_hiero.md
new file mode 100644
index 0000000000..4ada1fba2d
--- /dev/null
+++ b/website/docs/artist_hosts_hiero.md
@@ -0,0 +1,193 @@
+---
+id: artist_hosts_hiero
+title: Hiero
+sidebar_label: Hiero / Nuke Studio
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+:::note
+All the information also applies to **_Nuke Studio_** (NKS), but for simplicity we only refer to Hiero/NKS. The workflows are identical for both. We are supporting versions **`11.0`** and above.
+:::
+
+## OpenPype global tools
+
+- [Work Files](artist_tools.md#workfiles)
+- [Create](artist_tools.md#creator)
+- [Load](artist_tools.md#loader)
+- [Manage (Inventory)](artist_tools.md#inventory)
+- [Publish](artist_tools.md#publisher)
+
+## Hiero specific tools
+
+
+
+### Create Default Tags
+
+This tool will recreate all the OpenPype tags needed for successful publishing. It runs automatically at the start of Hiero/NKS. Use this tool to manually re-create all the tags if you accidentally delete them, or if you want to reset them to their default values.
+
+#### Result
+
+- Will create tags in the Tags bin in case there were none
+- Will set all tags to default values if they have been altered
+
+
+ +![Default Tags](assets/hiero_defaultTags.png) + +
+
+ +
+
+
+### Apply Colorspace Project
+
+This tool will apply any colorspace definition from OpenPype `Settings / Project / Anatomy / Color Management and Output Formats / Hiero / Workfile` to Hiero `menu / Project / Edit Settings / Color Management tab`.
+
+#### Result
+
+- Defines the correct color management settings on the project
+
+
+ +![Default Tags](assets/hiero_menuColorspaceProject.png) + +
+
+ + +
+
+
+### Apply Colorspace Clips
+
+This tool will apply any colorspace definition from OpenPype `Settings / Project / Anatomy / Color Management and Output Formats / Hiero / Colorspace on Inputs by regex detection` to any matching clip's source path.
+
+#### Result
+
+- Sets the correct `Set Media Color Transform` on each clip of the active timeline if it matches the defined expressions
+
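+The idea behind the regex detection can be sketched as follows. The rules and
+colorspace names below are hypothetical examples, not shipped defaults - the
+real pairs come from the settings path mentioned above:
+
+```python
+import re
+
+# Illustrative sketch: pick the first rule whose pattern matches the
+# clip's source path. Rule values here are made-up examples.
+RULES = [
+    (r".*_plate\.exr$", "ACES - ACEScg"),
+    (r".*\.mov$", "Output - Rec.709"),
+]
+
+def colorspace_for(path):
+    for pattern, colorspace in RULES:
+        if re.match(pattern, path):
+            return colorspace
+    return None
+
+print(colorspace_for("/footage/sh010_plate.exr"))  # ACES - ACEScg
+```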
+
+ +![Default Tags](assets/hiero_menuColorspaceClip.png) + +
+
+
+## Publishing Shots
+
+
+With OpenPype, you can use Hiero/NKS as a starting point for creating a project's **shots** as *assets* from timeline clips, together with their *hierarchical parents* like **episodes**, **sequences** and **folders**, and their child **tasks**. Most importantly, it will create **versions** of plate *subsets*, with or without a **reference video**. Publishing also creates the clips' **thumbnails** and assigns them to the shot *assets*. Hiero additionally publishes an **audio** *subset* and various **soft-effects**, either as a retiming component that is part of the published plates, or as **color transformations** that will be available later for compositing artists to use either as a *viewport input-process* or as *loaded nodes* in the graph editor.
+



+
+### Preparing timeline for conversion to instances
+We don't support on-the-fly data conversion, so if you are working with raw camera sources or other formats which need to be converted for 2D/3D work, we suggest converting them beforehand and reconforming the timeline. Before any clips in the timeline can be converted to publishable instances, we recommend the following (see the sketch after this list for how subset names are composed):
+1. Merge all tracks which are supposed to be one and are only multiplied because of the editor's style.
+2. Rename tracks to follow a basic structure > if there is only one layer, then `main`; in case of multiple layers (elements) for one shot, then `main` plus other elements, for example: `bg`, `greenscreen`, `fg01`, `fg02`, `display01`, etc. Please avoid using [-/_.,%&*] or spaces. These names will later be used in *subset* name creation as `{family}{trackName}`, so for example **plateMain** or **plateFg01**.
+3. Define the correct `Set Media Color Transform` on all clips, as it will also be published to metadata and used later for loading with the correct color transformation.
+4. Reviewable video material which you wish to be used as preview video on any supported project management platform (Ftrack) has to be added, ideally to a track named **review**. This can be an offline edit used as reference video for 2D/3D artists. This video material can be edited to fit the length of the **main** timeline track, or it can be one long video clip underneath all the clips in the **main** track, because OpenPype will trim it to the appropriate length using FFmpeg. Please be aware we only support MP4 (h264) or JPG sequences at the moment.
+
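+As a minimal sketch, this is how the `{family}{trackName}` template from point 2
+composes subset names (the helper is hypothetical; the actual composition is
+done by OpenPype):
+
+```python
+# Sketch of subset-name composition from the family and the track name,
+# per the `{family}{trackName}` template mentioned above.
+def subset_name(family: str, track_name: str) -> str:
+    # Track names such as "fg01" get capitalized to "Fg01" when appended.
+    return f"{family}{track_name[:1].upper()}{track_name[1:]}"
+
+assert subset_name("plate", "main") == "plateMain"
+assert subset_name("plate", "fg01") == "plateFg01"
+```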
+ +![Create menu](assets/hiero_timelinePrep.png) + +
+
+
+### Converting timeline clips to instances
+
+Every clip on the timeline which is intended to be published has to be converted to a publishable instance.
+
+
+
+In OpenPype this is done by tagging a clip with our own metadata. Select all the clips you wish to convert and go `menu > OpenPype > Create`. The sketch below illustrates the kind of metadata such a tag can carry.
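+
+```python
+# Hypothetical illustration of the kind of metadata such a tag can hold.
+# Only handleStart/handleEnd are named elsewhere on this page; the other
+# keys and all values are made up, and the real schema is defined by
+# OpenPype.
+clip_tag_metadata = {
+    "family": "plate",
+    "subset": "plateMain",
+    "handleStart": 10,
+    "handleEnd": 10,
+}
+print(clip_tag_metadata["subset"])  # plateMain
+```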



+ +
+ +
+ +![Create menu](assets/hiero_menuCreate.png) + +
+ +
+
+Then choose `Create Publishable Clip` in the **Instance Creator** dialogue.
+
+You can then alter the Subset name, but it will be changed dynamically and replaced with the timeline's track name.
+
+Keep **Use selection** on.
+
+Hit **Create**.
+
+![Instance Creator](assets/hiero_instanceCreator.png)
+
+The dialogue `Pype publish attributes creator` will open. Here you can define instance properties. If you wish to rename clips dynamically during creation, keep **Rename clips** ticked.
+
+In case you wish to use the *multiple elements of shots* workflow, keep **Enable vertical sync** ticked and define the correct hero track which holds the main plates - this is usually the **main** track.
+
+![Create menu](assets/hiero_createUIRename.png)
+
+The subset name is created dynamically if `` is selected in **Subset name**.
+
+In case you wish to publish a reviewable video as explained above, find the appropriate track in the **Use review track** drop-down menu. It is usually named `review`.
+
+Hover above each input field for help.
+
+Handles can be defined here too. In case you wish to set individual clips differently, we recommend setting the default value here and changing it later in the created OpenPype tag's metadata under the `handleStart` and `handleEnd` properties (see below for details).
+
+![Create menu](assets/hiero_createUIFrames.png)
+
+After you hit **Ok**, tags are added to the selected clips (except clips in **review** tracks).
+
+If you wish to change any individual property of the shot, you are able to do it here. In this example we can change `handleStart` and `handleEnd` to some other values.
+
+![Create menu](assets/hiero_tagHandles.png)
+
+
diff --git a/website/docs/artist_hosts_maya.md b/website/docs/artist_hosts_maya.md
new file mode 100644
index 0000000000..d19bde7b49
--- /dev/null
+++ b/website/docs/artist_hosts_maya.md
@@ -0,0 +1,717 @@
+---
+id: artist_hosts_maya
+title: Maya
+sidebar_label: Maya
+---
+
+## OpenPype global tools
+
+- [Set Context](artist_tools.md#set-context)
+- [Work Files](artist_tools.md#workfiles)
+- [Create](artist_tools.md#creator)
+- [Load](artist_tools.md#loader)
+- [Manage (Inventory)](artist_tools.md#inventory)
+- [Publish](artist_tools.md#publisher)
+- [Library Loader](artist_tools.md#library-loader)
+
+## Working with OpenPype in Maya
+
+OpenPype is here to ease the burden of working on a project with lots of
+collaborators - worrying about naming, setting things up, browsing through
+endless directories, loading and exporting and so on. To achieve that,
+OpenPype uses the concept of being _"data driven"_. This means that what
+happens when publishing is influenced by data in the scene. This can be
+slightly confusing, so let's get to it with a few examples.
+
+## Publishing models
+
+### Intro
+
+Publishing models in Maya is pretty straightforward. Create your model as you
+need. You need to adhere to the specifications of your studio, which can differ
+between studios and projects, but by default your geometry has to be named properly -
+for example `sphere_GEO` or `cube1_GEO`. Geometry needs to have frozen transformations
+and must reside under one group, for example `model_GRP`.
+
+![Model example](assets/maya-model_hierarchy_example.jpg)
+
+Note that `sphere_GEO` has frozen transformations.
+
+### Creating instance
+
+Now create a **Model instance** from it to let OpenPype know what in the scene you want to
+publish. Go **OpenPype β†’ Create... β†’ Model**
+
+![Model create instance](assets/maya-model_create_instance.jpg)
+
+The `Asset` field is the name of the asset you are working on - it should already be filled
+with the correct name, as you've started Maya or switched context to a specific asset. You
+can edit that field to change it to a different asset (but that one must already exist).
+
+The `Subset` field is a name you can decide on. It should describe what kind of data you
+have in the model. For example, you can name it `Proxy` to indicate that this is
+low resolution geometry. See [Subset](artist_concepts#subset).
+
+:::note LOD support
+By changing the subset name you can take advantage of the _LOD support_ in OpenPype. Your
+asset can contain various resolutions defined by different subsets. You can then
+switch between them very easily using [Inventory (Manage)](artist_tools#inventory).
+There, LODs are conveniently grouped so they don't clutter the Inventory view.
+
+Name your subset like `main_LOD1`. The important part is the `_LOD1`. You can have as many LODs as you need.
+:::
+
+The read-only field just under it shows the final subset name, appending the subset field to
+the name of the group you have selected.
+
+The `Use selection` checkbox will use whatever you have selected in the Outliner to be
+wrapped in the Model instance. This is usually what you want. Click on the **Create** button.
+
+You'll notice that after you've created the new Model instance, there is a new set
+in the Outliner named after your subset - in our case `modelMain`.
+
+And that's it, you have your first model ready to publish.
+
+Now save your scene (if you didn't do it already). You will notice that the path
+in the Save dialog is already set to the place where scenes related to the modeling task on
+your asset should reside.
+As in our case we are working on the asset
+**Ben** and the task **modeling**, the path relative to your project directory will be
+`project_XY/assets/ben/work/modeling`. Let's save our scene as `model_test_v01`.
+
+### Publishing models
+
+Now let's publish it. Go **OpenPype β†’ Publish...**. You will be presented with the following window:
+
+![Model publish](assets/maya-model_pre_publish.jpg)
+
+Note that the content of this window can differ depending on your pipeline configuration.
+For more detail see [Publisher](artist_tools#publisher).
+
+The items in the left column are the instances you will be publishing. You can disable them
+by clicking on the square next to them. A green square indicates they are ready for
+publishing, red means something went wrong during either the collection phase
+or the publishing phase. An empty one with gray text is disabled.
+
+See that in this case we are publishing from the scene file `model_test_v01.mb` the
+Maya model named `modelMain (ben)` (next item). Publishing of the workfile is
+currently disabled (last item).
+
+The right column lists all the tasks that are run during the collection, validation,
+extraction and integration phases. White items are optional and you can disable
+them by clicking on them.
+
+Let's do a dry run on publishing to see if we pass all the validators. Click on the flask
+icon at the bottom. The validators are run. Ideally you will end up with everything
+green in the validator section.
+
+### Fixing problems
+
+To make things interesting, I intentionally forgot to freeze the transformations
+on `sphere_GEO`, as I know it will trigger the validator designed to check just this.
+
+![Failed Model Validator](assets/maya-model_publish_error.jpg)
+
+You can see our model is now marked red in the left column, and on the right we have a
+red box next to the `Transform Zero (Freeze)` validator.
+
+You can click on the arrow next to it to see more details:
+
+![Failed Model Validator details](assets/maya-model_freeze_error_details.jpg)
+
+From there you can see in the **Records** entry that there is a problem with `sphere_GEO`.
+Some validators have an option to fix the problem for you, or to just select the objects that
+cause trouble. This is the case with our failed validator.
+
+In the main overview you can notice a little up arrow in a circle next to the validator
+name. Right click on it and you can see the menu item `select invalid`. This
+will select the offending object in Maya.
+
+The fix is easy. Without closing the Publisher window, we just freeze the transformations.
+Then we need to reset the Publisher to make it notice the changes we've made. Click on the arrow
+circle button at the bottom and it will reset the Publisher to its initial state. Run the
+validators again (flask icon) to see if everything is OK.
+
+It should be OK now. Write a comment if you want and click the play icon button
+when ready.
+
+The publish process will now take its course. Depending on the data you are publishing,
+it can take a while. You should end up with everything green and the message
+**Finished successfully ...** You can now close the publisher window.
+
+To check for yourself that the model is published, open the
+[Asset Loader](artist_tools#loader) - **OpenPype β†’ Load...**.
+There you should see your model, named `modelMain`.
+
+## Look development
+
+Look development in OpenPype is easy. It helps you with versioning different
+kinds of shaders and switching between them easily.
+
+Let's see how it works.
+
+### Loading model
+
+In this example I have already published a model of Buddha. To see how to publish a
+model with OpenPype see [Publishing Model](artist_hosts_maya#publishing-models).
+
+First off, let's start with an empty scene.
+Now go **OpenPype β†’ Load...**
+
+![Model loading](assets/maya-model_loading.jpg)
+
+Here I am loading `modelBuddha`, version 1, for the asset **foo**. Just right-click
+on it and select **Reference (abc)**. This will load the model into the scene as alembic.
+Now you can close the Loader window.
+
+### Creating look
+
+Now you can create whatever look you want. Assign shaders, textures, etc. to the model.
+In my case, I assigned a simple Arnold _aiSurfaceShader_ and changed its color to red.
+
+![Look Dev - Red Buddha](assets/maya-look_dev-red_buddha.jpg)
+
+I am quite happy with it, so I want to publish it as my first look.
+
+### Publishing look
+
+Select your model in the Outliner and go **OpenPype β†’ Create...**. From there
+select **Look**. Make sure the `use selection` checkbox is checked.
+My subset name is `Main`. This will create a _Look instance_ with the name **lookMain**.
+
+Close the _Creator_ window.
+
+Now save your scene and give it some sensible name. Next, go **OpenPype β†’ Publish**.
+This process is almost identical to publishing models, only different _Validators_
+and other plugins will be used.
+
+This should be painless and cause no trouble, so go ahead, click the play icon button at
+the bottom and it will publish your look.
+
+:::note publishing multiple looks
+You can reference the same model into the scene multiple times and change the materials on every
+instance to what you need. Then create a _Look instance_ on every model. When
+publishing, all those _Look instances_ will be published at the same time.
+:::
+
+### Loading looks into models
+
+Now let's see how looks are applied. Start a new empty scene and load your published
+model there as before (using _Reference (abc)_). If you didn't notice until now,
+there are a few yellow icons in the left shelf:
+
+![Maya - shortcut icons](assets/maya-shortcut_buttons.jpg)
+
+Those are shortcuts for the **Look Manager**, [Work Files](artist_tools.md#workfiles),
+[Load](artist_tools.md#loader), and [Manage (Inventory)](artist_tools.md#inventory).
+
+They can also be found in the top menu, but that depends on your studio setup.
+
+You are interested now in the **Look Manager** - the first item, with the brush icon. Select
+your Buddha model and open the **Look Manager**.
+
+![Maya - Look Manager](assets/maya-look_dev-look_manager.jpg)
+
+This is the **Look Manager** window. Yours will be empty until you click **Get All Assets**
+or **Get Assets From Selection**. You can use the latter to quickly assign looks if you have
+multiple assets loaded in the scene. Click on one of those buttons now.
+
+You should now see all the assets and their subsets loaded in the scene, and on the right side
+all applicable published looks.
+
+Select your asset and on the right side right click on the `Main` look. Apply it.
+
+You'll notice that the Buddha model is now red - the materials you've published are now applied
+to it.
+
+That way you can create looks as you want and version them using OpenPype.
+
+## Setting scene data
+
+Maya settings concerning framerate, resolution and frame range are handled by
+OpenPype. If set correctly in Ftrack, Maya will validate that you have the correct fps on
+scene save and publishing, offering a way to fix it for you.
+
+For resolution and frame range, use **OpenPype β†’ Reset Frame Range** and
+**OpenPype β†’ Reset Resolution**
+
+## Creating rigs with OpenPype
+
+Creating and publishing rigs with OpenPype follows a similar workflow as with the
+other data types. Create your rig and mark parts of your hierarchy in sets to
+help the OpenPype validators and extractors check and publish it.
+
+### Preparing rig for publish
+
+When creating rigs, it is recommended (and in fact enforced by validators)
+to separate bones or driving objects, their controllers and geometry, so they are
+easily managed. Currently OpenPype doesn't allow publishing a model at the same time as
+its rig, so for demonstration purposes I'll first create a simple model for a robotic
+arm, just made out of simple boxes, and publish it.
+
+![Maya - Simple model for rigging](assets/maya-rig_model_setup.jpg)
+
+For more information about publishing models, see [Publishing models](artist_hosts_maya#publishing-models).
+
+Now let's start with an empty scene. Load your model - **OpenPype β†’ Load...**, right
+click on it and select **Reference (abc)**.
+
+I've created a few bones and their controllers in two separate
+groups - `rig_GRP` and `controls_GRP`. Naming is not important - just adhere to
+your naming conventions.
+
+Then I've put everything into an `arm_rig` group.
+
+When you've prepared your hierarchy, it's time to create a *Rig instance* in OpenPype.
+Select your whole rig hierarchy and go **OpenPype β†’ Create...**. Select **Rig**.
+A set is created in your scene to mark the rig parts for export. Notice that it has
+two subsets - `controls_SET` and `out_SET`. Put your controls into `controls_SET`
+and the geometry into `out_SET`. You should end up with something like this:
+
+![Maya - Rig Hierarchy Example](assets/maya-rig_hierarchy_example.jpg)
+
+### Publishing rigs
+
+Publishing a rig is done in the same way as publishing everything else. Save your scene
+and go **OpenPype β†’ Publish**. When you run validation you'll most likely run into a
+few issues at first. Although the number of them may seem intimidating, you'll
+find out they are mostly minor things that are easily fixed.
+
+* **Non Duplicate Instance Members (ID)** - This will most likely fail, because when
+creating rigs we usually duplicate a few parts to reuse them. But duplication
+also duplicates the ID of the original object, and OpenPype needs every object to have a
+unique ID. This is easily fixed by the **Repair** action next to the validator name: click
+on the little up arrow on the right side of the validator name and select **Repair** from the menu.
+
+* **Joints Hidden** - This enforces joints (bones) to be hidden from the user, as the
+animator usually doesn't need to see them and they clutter the viewports. A
+well-behaved rig should have them hidden. The **Repair** action will help here too.
+
+* **Rig Controllers** will check that there are no transforms on unlocked attributes
+of controllers. This is needed because the animator should have an easy way to reset the rig
+to its default position. It also checks that those attributes don't have any
+incoming connections from other parts of the scene, to ensure that the published rig doesn't
+have any missing dependencies.
+
+### Loading rigs
+
+You can load a rig with the [Loader](artist_tools.md#loader). Go **OpenPype β†’ Load...**,
+select your rig, right click on it and **Reference** it.
+
+## Point caches
+OpenPype uses the Alembic format for point caches. The workflow is very similar to the
+other data types.
+
+### Creating Point Caches
+
+To create a point cache, just create whatever hierarchy you want and animate it.
+Select its root and go **OpenPype β†’ Create...** and select **Point Cache**.
+
+After that, publishing will create the corresponding **abc** files. A minimal
+scene-building sketch follows below.
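+
+As an illustration, a source hierarchy for a point cache could be built like
+this. The names are hypothetical and the calls are standard `maya.cmds`; this
+is not OpenPype code, just a sketch of a publishable setup:
+
+```python
+# Build a tiny animated hierarchy to use as a Point Cache source.
+from maya import cmds
+
+root = cmds.group(empty=True, name="pointcacheSource_GRP")
+cube = cmds.polyCube(name="box_GEO")[0]
+cmds.parent(cube, root)
+cmds.setKeyframe(cube, attribute="translateY", time=1, value=0)
+cmds.setKeyframe(cube, attribute="translateY", time=50, value=10)
+# Select the root, then go OpenPype -> Create... -> Point Cache.
+cmds.select(root, replace=True)
+```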
+
+Example setup:
+
+![Maya - Point Cache Example](assets/maya-pointcache_setup.png)
+
+### Loading Point Caches
+
+Loading a point cache means creating a reference to the **abc** file via **OpenPype β†’ Load...**.
+
+Example result:
+
+![Maya - Point Cache Example](assets/maya-pointcache_loaded.png)
+
+## Set dressing in Maya
+
+Set dressing is a term for easily populating complex scenes with individual parts.
+OpenPype allows you to version and manage those sets.
+
+### Publishing Set dress / Layout
+
+Working with set dresses is very easy. Just load your assets into the scene with the
+[Loader](artist_tools.md#loader) (**OpenPype β†’ Load...**). Populate your scene as
+you wish and translate each piece to fit your needs. When ready, select all the imported
+assets and go **OpenPype β†’ Create...** and select **Set Dress** or **Layout**.
+This will create a set containing your selection and mark it for publishing.
+
+:::note set dress vs layout
+Currently *set dress* and *layout* are functionally identical
+:::
+
+Now you can publish it with **OpenPype β†’ Publish**.
+
+### Loading Set dress / Layout
+
+You can load a set dress / layout using the [Loader](artist_tools.md#loader)
+(**OpenPype β†’ Load...**). Select your layout or set dress, right click on it and
+select **Reference Maya Ascii (ma)**. This will populate your scene with all the
+models you've put into the layout.
+
+## Rendering with OpenPype
+
+OpenPype in Maya can be used for submitting renders to a render farm and for their
+subsequent publishing. Right now OpenPype supports [AWS Thinkbox Deadline](https://www.awsthinkbox.com/deadline)
+and [Virtual Vertex Muster](https://www.vvertex.com/overview/).
+
+* For setting up Muster support see the [admin section](admin_config#muster)
+* For setting up Deadline support see [here](admin_config#aws-thinkbox-deadline)
+
+:::note Muster login
+Muster is now configured so that every user must log in to get authentication support. If OpenPype finds out this token is missing or expired, it will ask for credentials again.
+:::
+
+### Creating basic render setup
+
+If you want to submit your render to the farm, just follow these simple steps.
+
+#### Preparing scene
+
+Let's start with an empty scene. First I'll pull in my favorite Buddha model:
+**OpenPype β†’ Load...**, select the model and right-click to pop up the context menu. From
+there just click on **Reference (abc)**.
+
+Next, I want to be sure that I have the same frame range as is set on the shot I am working
+on. To do this just go **OpenPype β†’ Reset Frame Range**. This should set the Maya timeline to the same
+values as set on the shot in *Ftrack*, for example.
+
+I have my time set, so let's create some animation. We'll turn the Buddha model around for
+50 frames (this is the length of my timeline).
+
+Select the model, go to the first frame, key the Y axis rotation, go to the last frame, enter 360 in the
+**Channel Editor** Y rotation, key it, and it's done. If you are not sure how to do it,
+you are probably reading the wrong documentation.
+
+Now let's set up the lights, ground and camera. I am lazy, so I create Arnold's Skydome light:
+**Arnold β†’ Lights β†’ Skydome Light**. As the ground a simple plane will suffice, and I'll set
+my perspective view as I like, create a new camera from it (`CTRL+SHIFT+C`) and rename
+it from `persp1` to `mainCamera`.
+
+One last thing: I'll assign a basic *aiSurfaceShader* to my Buddha and do some little
+tweaks on it.
+
+#### Prepare scene for submission
+
+As we now have a simple working scene, we can start preparing it for rendering. OpenPype fully utilizes
+Render Setup layers for this.
+First of all, we need to create a *Render instance* to tell OpenPype what
+to do with the renders. You can easily render locally or on a render farm without it, but the *Render instance*
+is there to mark the render layers you want to publish.
+
+Let's create it. Go **OpenPype β†’ Create...** and select **Render** from the list. If you keep
+**Use selection** checked, it will use your current render layers (if you have them). Otherwise,
+if no render layers are present in the scene, it will create one for you named **Main** and, under it, a
+default collection with the `*` selector.
+
+No matter if you use *Deadline* or *Muster*, OpenPype will try to connect to the render farm and
+fetch the machine pool list.
+
+:::note Muster login
+This might fail on *Muster* in the event that you have an expired authentication token. In that case, you'll be presented with a login window. Nothing will be created in the scene until you log in again and run create **Render** again.
+:::
+
+So now my scene looks like this:
+
+![Maya - Render scene Setup](assets/maya-render_setup.jpg)
+
+You can see that it created a `renderingMain` set and under it `LAYER_Main`. This set corresponds to the
+**Main** render layer in Render Setup. It was automatically created because I had not created any
+render layers in the scene before. If you already have layers and you use **Use selection**, they will
+appear here, prefixed with `LAYER_`. Those layer sets are created whenever you create a new layer in
+Render Setup and are deleted if you delete the layer in Render Setup. However, if you delete a `LAYER_` set,
+the layer in Render Setup isn't deleted. It just means it won't be published.
+
+Creating a *Render instance* will also set the image prefix in the render settings to OpenPype defaults based on
+the renderer you use - for example, if you render with Arnold, it is `maya///_`.
+
+There are a few settings on the *Render instance* `renderingMain` in the **Attribute Editor**:
+
+![Maya - Render attributes](assets/maya-renderglobals.jpg)
+
+A few options that need explaining (a small sketch of the last one follows below):
+
+* `Primary Pool` - a list of pools fetched from the server that you can select from.
+* `Suspend publish Job` - the job sent to the farm will not start rendering automatically
+but stays in a *waiting* state.
+* `Extend Frames` - if checked, it will add new frames to the previous render, so you can
+extend a previous image sequence.
+* `Override Existing Frame` - will overwrite files in the destination if they exist.
+* `Priority` is the priority of the job on the farm.
+* `Frames Per Task` is the number of divisions of the sequence into individual tasks (chunks)
+making up one job on the farm.
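+
+To make `Frames Per Task` concrete, here is an illustrative sketch of how a
+frame range could be divided into chunks. The actual division is done by the
+farm; this only shows the idea:
+
+```python
+# Illustrative only: split a frame range into chunks of `frames_per_task`
+# frames, the way a farm divides one job into tasks.
+def chunk_frames(start, end, frames_per_task):
+    frames = list(range(start, end + 1))
+    return [frames[i:i + frames_per_task]
+            for i in range(0, len(frames), frames_per_task)]
+
+print(chunk_frames(1, 10, 4))  # [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10]]
+```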
+
+Now if you run publish, you'll notice there is a new item in the right column called
+`Render Layers`, and in it our new layer `Main (999_abc_0010) [1-10]`. The first part is the
+layer name, the second part `(999_abc_0010)` is the asset name and the rest is the frame range.
+
+![Maya - Render Publish errors](assets/maya-render_publish_detail1.jpg)
+
+You see I already tried to run publish but was stopped by a few errors. Let's go
+through them one by one, just to see what we need to set up further in the scene for a
+successful publish.
+
+**No Default Cameras Renderable** is telling me:
+
+```fix
+Renderable default cameras found: [u'|persp|perspShape']
+```
+
+and so can be resolved by a simple change in the *Main* layer render settings.
+All I have to do is remove the `persp` camera from the render settings and add the correct camera.
+
+This leaves me only with the **Render Settings** error. If I click on it to see the
+details, I see it has a problem with animation not being enabled:
+
+```fix
+Animation needs to be enabled. Use the same frame for start and end to render single frame
+```
+
+Go to **Render Settings**, select your render layer and in the **Common** tab change
+`Frame/Animation ext` under **File Output** to whatever you want, just not _Single Frame_.
+Set the **Frame Range** `Start frame` and `End frame` according to your needs.
+
+If you run into problems with the *image file prefix* - it should be set correctly when
+creating the *Render instance*, but you can tweak it. It needs to begin with the `maya/` token
+to avoid render conflicts between DCCs. It needs to have `` or `` (vray) and
+`` or `` (vray). If you have more than one renderable camera, add the `` token.
+
+A sane default for Arnold, Redshift or Renderman is:
+
+```fix
+maya//_
+```
+
+and for V-Ray:
+
+```fix
+maya//
+```
+
+Doing **OpenPype β†’ Reset Resolution** will set the correct resolution on the camera.
+
+The scene is now ready for submission and should publish without errors.
+
+:::tip what happens when I publish my render scene
+When publishing is finished, a job is created on the farm. This job has one more dependent job connected to it.
+When the render is finished, this other job kicks in and runs publish again, but this time it publishes the rendered image sequence and creates a quicktime movie from it for preview. Only those rendered sequences that have a **beauty** AOV get a preview, as it doesn't make sense to create one from, for example, a cryptomatte.
+:::
+
+### Attaching render to subset
+
+You can create a render that will be attached to another subset you are publishing, rather than being published on its own. Let's assume you want to render a model turnaround.
+In the scene from which you want to publish your model, create a *Render subset*. Prepare your render layer as needed and then drag the
+model subset (a Maya set node) under the corresponding `LAYER_` set under the *Render instance*. During publish, it will submit this render to the farm and,
+after it is rendered, it will be attached to your model subset.
+
+## Render Setups
+
+### Publishing Render Setups
+
+OpenPype can publish the whole **Render Settings** setup. You can then version it and load it into
+any Maya scene. This helps TDs distribute per asset/shot render settings for Maya.
+
+To publish render settings, go **OpenPype β†’ Create...** and select **Render Setup Preset**.
+
+A set `rendersetup` will appear in your scene. It has no settings; only its presence
+in the scene will trigger the publishing of render settings.
+
+When you publish the scene, the current settings in **Render Settings** will be serialized to a json file.
+
+### Loading Render Setups
+
+In any scene, you can load published render settings with **OpenPype β†’ Load...**. Select your published
+render setup settings, right-click on it and select **Load RenderSetup template**.
+
+This will load and parse the json file and apply all the settings there to your Render Settings.
+
+:::warning
+This will overwrite all the settings you already have.
+:::
+
+## Reviews
+
+OpenPype supports creating a review video for almost any type of data you want to publish.
+What we call a review video is actually a _playblast_ or _capture_ (depending on the terminology
+you are familiar with) made from a pre-defined camera in the scene. This is very useful
+in cases where you want to add a turntable preview of your model, for example. But it can
+be used to generate previews for animation, simulations, and so on.
+
+### Setting scene for review extraction
+
+Let's see how review publishing works on a simple scene. We will publish a model with a
+turntable preview video.
+
+I'll be using the Stanford University dragon model.
+Start with an empty scene.
+Create your model, import it or load it from OpenPype. I'll just import the model as an OBJ
+file.
+
+After we have our model in, we need to set everything up to be able to publish it
+as a model - for detail see [Publishing models](artist_hosts_maya#publishing-models).
+
+To recap - freeze the transforms, rename it to `dragon_GEO` and put it into a group
+`dragon_GRP`. Then select this group and go **OpenPype β†’ Create...** and choose **Model**.
+
+Now let's create the camera we need to generate the turntable video. I prefer to animate the
+camera itself and not the model, because that way all animation keys are associated with the camera
+and not with the model we want to publish.
+
+I've created a camera, named it `reviewCamera` and parented it under a `reviewRotation_LOC`
+locator. I set my timeline to 50 frames, keyed the `reviewRotation_LOC` Y axis on frame
+1 to 0 and on frame 50 to 360. I've also set the animation curve between those two keys
+to linear.
+
+To mark the camera to be used for review, select the camera `reviewCamera`, go **OpenPype β†’ Create...**
+and choose **Review**.
+
+This will create a set `review` including the selected camera. You can set a few options
+on this set to control the review video generation:
+
+* `Active` - controls the on/off state
+* `Frame Start` - starting frame for the review
+* `Frame End` - end frame for the review
+* `Handles` - number of handle frames before and after
+* `Step` - number of steps
+* `Fps` - framerate
+
+This is my scene:
+
+![Maya - Review model setup](assets/maya-model_review_setup.jpg)
+
+_* note that I had to fix the UVs and normals on the Stanford dragon model, as it wouldn't pass the
+model validators_
+
+### Publishing model with review
+
+You can now publish your model and generate the review video. Go **OpenPype β†’ Publish...**,
+validate if you will, and publish it. During publishing, Maya will create a _playblast_
+for the whole frame range you've specified, then it will pass those frames to _ffmpeg_.
+That will create a video file, pass it to another extractor that creates burnins in it,
+and finally upload this video to Ftrack with your published model (or other type)
+version. All parts of this process - like which burnins, what type of video file, and the
+settings for the Maya playblast - can be customized by your TDs. For more information
+about customizing the review process refer to the [admin section](admin_presets_plugins).
+
+## Working with Yeti in OpenPype
+
+OpenPype can work with [Yeti](https://peregrinelabs.com/yeti/) in two data modes.
+It can handle Yeti caches and Yeti rigs.
+
+### Creating and publishing Yeti caches
+
+Let's start by creating a simple Yeti setup - just one object and a Yeti node. Open a new
+empty scene in Maya and create a sphere. Then select the sphere and go **Yeti β†’ Create Yeti Node on Mesh**.
+Open the Yeti node graph **Yeti β†’ Open Graph Editor** and create a setup like this:
+
+![Maya - Yeti Basic Graph](assets/maya-yeti_basic_setup.jpg)
+
+It doesn't matter what settings you use now, just select the proper shape in the first
+*Import* node. Select your Yeti node and create a *Yeti Cache instance* - **OpenPype β†’ Create...**
+and select **Yeti Cache**. Leave `Use selection` checked. You should end up with this setup:
+
+![Maya - Yeti Basic Setup](assets/maya-yeti_basic_setup_outline.jpg)
+
+You can see there is a `yeticacheDefault` set. Instead of *Default* it could be named with
+whatever name you've entered in the `subset` field during instance creation.
+
+We are almost ready for publishing the cache. You can check the basic settings by selecting the
+Yeti cache set and opening *Extra attributes* in the Maya **Attribute Editor**.
+
+![Maya - Yeti Basic Setup](assets/maya-yeti_cache_attributes.jpg)
+
+The attributes there are self-explanatory:
+
+- `Preroll` is the number of frames the simulation will run before the cache frames are stored.
+This is useful to "settle" the simulation, for example.
+- `Frame Start` - from what frame we start to store cache files
+- `Frame End` - to what frame we are storing cache files
+- `Fps` of the cache
+- `Samples` - how many time samples we take during caching
+
+You can now publish the Yeti cache like any other type: **OpenPype β†’ Publish**. It will
+create a sequence of `.fur` files and a `.fursettings` metadata file with the Yeti node
+settings.
+
+### Loading Yeti caches
+
+You can load a Yeti cache via **OpenPype β†’ Load ...**. Select your cache, right-click on
+it and select **Load Yeti cache**. This will create a Yeti node in the scene and set its
+cache path to point to your published cache files. Note that this Yeti node will
+be named with the same name as the one you've used to publish the cache. Also notice that
+when you open the graph on this Yeti node, all the nodes are as they were in the publishing node.
+
+### Creating and publishing Yeti Rig
+
+Yeti rigs work in a similar way as caches, but they are more complex and deal with the
+other data used by Yeti, like geometry and textures.
+
+Let's start by [loading](artist_hosts_maya#loading-model) some model into a new scene.
+I've loaded my Buddha model.
+
+Select the model mesh and create a Yeti node - **Yeti β†’ Create Yeti Node on Mesh** - and
+set up a similar Yeti graph as in the cache example above.
+
+Then select this Yeti node (mine is called with the default name `pgYetiMaya1`) and
+create a *Yeti Rig instance* - **OpenPype β†’ Create...** and select **Yeti Rig**.
+Leave `Use selection` checked.
+
+The last step is to add our model geometry to the rig instance, so middle-drag its
+geometry to the `input_SET` under the `yetiRigDefault` set representing the rig instance.
+Note that its name can differ and is based on your subset name.
+
+![Maya - Yeti Rig Setup](assets/maya-yeti_rig.jpg)
+
+Save your scene and we are ready to publish our new simple Yeti rig!
+
+Go **OpenPype β†’ Publish** and run it. This will publish the rig with its geometry
+as a `.ma` scene, save the Yeti node settings and export one frame of Yeti cache from
+the beginning of your timeline. It will also collect all the textures used in the Yeti
+node, copy them to the publish folder's `resource` directory and set the *Image search path*
+of the published node to this location.
+
+:::note Collect Yeti Cache failure
+If you encounter a **Collect Yeti Cache** failure during the collecting phase, and the error is like
+```fix
+No object matches name: pgYetiMaya1Shape.cbId
+```
+then it is probably caused by the scene not being saved before publishing.
+:::
+
+### Loading Yeti Rig
+
+You can load published Yeti rigs as any other thing in OpenPype - **OpenPype β†’ Load ...**,
+select your Yeti rig and right-click on it. In the context menu you should see the
+**Load Yeti Cache** and **Load Yeti Rig** items (among others). The first one will
+load that one-frame cache. The other one will load the whole rig.
+
+Notice that although we put only geometry into the `input_SET`, the whole hierarchy was
+pulled in as well. This allows you to store complex scene elements along with the Yeti
+node.
+
+:::tip auto-connecting rig mesh to existing one
+If you select some objects before loading the rig, it will try to find shapes
+under the selected hierarchies and match them with the shapes loaded with the rig (published
+under `input_SET`). This mechanism uses the *cbId* attribute on those shapes.
+If a match is found, the shapes are connected using their `outMesh` and `inMesh` attributes. Thus you can easily connect existing animation to the loaded rig.
+:::
+
+## Using Redshift Proxies
+
+OpenPype supports working with Redshift Proxy files. You can create a Redshift Proxy from almost
+any hierarchy in Maya and it will be included in it. Redshift can export an animated
+proxy file per frame.
+
+### Creating Redshift Proxy
+
+To mark data to be published as a Redshift Proxy, select it in Maya and - **OpenPype β†’ Create ...** - then
+select **Redshift Proxy**. You can name your subset and hit the **Create** button.
+
+You can enable animation in the Attribute Editor:
+
+![Maya - Yeti Rig Setup](assets/maya-create_rs_proxy.jpg)
+
+### Publishing Redshift Proxies
+
+Once data is marked as a Redshift Proxy instance, it can be published - **OpenPype β†’ Publish ...**
+
+### Using Redshift Proxies
+
+Published proxy files can be loaded with the OpenPype Loader. It will create a mesh and attach the Redshift Proxy
+parameters to it - Redshift will then represent the proxy with a bounding box.
diff --git a/website/docs/artist_hosts_nuke.md b/website/docs/artist_hosts_nuke.md
new file mode 100644
index 0000000000..1e02599570
--- /dev/null
+++ b/website/docs/artist_hosts_nuke.md
@@ -0,0 +1,145 @@
+---
+id: artist_hosts_nuke
+title: Nuke
+sidebar_label: Nuke
+---
+
+:::important
+After Nuke starts, it will automatically **Apply All Settings** for you. If you are sure the settings are wrong, just contact your supervisor and they will set them correctly for you in the project database.
+:::
+
+:::note
+The workflows are identical for both. We are supporting versions **`11.0`** and above.
+:::
+
+## OpenPype global tools
+
+- [Set Context](artist_tools.md#set-context)
+- [Work Files](artist_tools.md#workfiles)
+- [Create](artist_tools.md#creator)
+- [Load](artist_tools.md#loader)
+- [Manage (Inventory)](artist_tools.md#inventory)
+- [Publish](artist_tools.md#publisher)
+- [Library Loader](artist_tools.md#library-loader)
+
+## Nuke specific tools
+
+
+ +### Set Frame Ranges + +Use this feature in case you are not sure the frame range is correct. + +##### Result + +- setting Frame Range in script settings +- setting Frame Range in viewers (timeline) + +
+
+ +![Set Frame Ranges](assets/nuke_setFrameRanges.png) + +
+
+ + +
+ +![Set Frame Ranges Timeline](assets/nuke_setFrameRanges_timeline.png) + +
+ +1. limiting to Frame Range without handles +2. **Input** handle on start +3. **Output** handle on end + +
+
+ +### Set Resolution + +
+
+
+This menu item will set the correct resolution format for you, as defined by your production.
+
+##### Result
+
+- creates a new item in the formats, named after the project
+- sets the new format as the one in use
+
+A rough sketch of the equivalent Python follows below.
+
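+Under the hood this roughly corresponds to registering a format via Nuke's
+Python API and making it the script default. This is a hedged sketch, not
+OpenPype's actual code, and "MyProject" is a placeholder name:
+
+```python
+# Rough sketch: register a project format and set it as the script default.
+import nuke
+
+nuke.addFormat("1920 1080 MyProject")
+nuke.root()["format"].setValue("MyProject")
+```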
+
+ +![Set Resolution](assets/nuke_setResolution.png) + +
+
+ + +### Set Colorspace + +
+
+
+This menu item will set the correct colorspace definitions for you. Everything has to be configured by your production (project coordinator).
+
+##### Result
+
+- sets the Colorspace in your script settings
+- sets the preview LUT for your viewers
+- sets the correct colorspace on all discovered Read nodes (following the expression set in settings)
+
+
+ +![Set Colorspace](assets/nuke_setColorspace.png) + +
+
+ + +### Apply All Settings + +
+
+
+It is usually enough to use this option once in a while, just to make sure the workfile has the correct properties set.
+
+##### Result
+
+- sets Frame Ranges
+- sets Colorspace
+- sets Resolution
+
+
+ +![Apply All Settings](assets/nuke_applyAllSettings.png) + +
+
+ +### Build Workfile + +
+
+
+This tool will append all available subsets to the current node graph. It will look into the database and get the latest [versions](artist_concepts.md#version) of all available [subsets](artist_concepts.md#subset).
+
+##### Result
+
+- adds the latest versions of subsets (rendered image sequences) as Read nodes
+- adds a publishable Write node as the `renderMain` subset
+
+
+ +![Build First Work File](assets/nuke_buildFirstWorkfile.png) + +
+
\ No newline at end of file
diff --git a/website/docs/artist_hosts_photoshop.md b/website/docs/artist_hosts_photoshop.md
new file mode 100644
index 0000000000..7e84349565
--- /dev/null
+++ b/website/docs/artist_hosts_photoshop.md
@@ -0,0 +1,107 @@
+---
+id: artist_hosts_photoshop
+title: Photoshop
+sidebar_label: Photoshop
+---
+
+## Available Tools
+
+- [Work Files](artist_tools.md#workfiles)
+- [Create](artist_tools.md#creator)
+- [Load](artist_tools.md#loader)
+- [Publish](artist_tools.md#publisher)
+- [Manage](artist_tools.md#inventory)
+
+## Setup
+
+To install the extension, download and install [Anastasiy's Extension Manager](https://install.anastasiy.com/). Open Anastasiy's Extension Manager and select Photoshop in the menu. Then go to `{path to pype}/repos/avalon-core/avalon/photoshop/extension.zxp`. Drag `extension.zxp` and drop it into Anastasiy's Extension Manager. The extension will install itself.
+
+## Usage
+
+When you launch Photoshop you will be met with the Workfiles app. If you don't have any previous workfiles, you can just close this window.
+
+In Photoshop you can find the tools in the `OpenPype` extension:
+
+![Extension](assets/photoshop_extension.PNG)
+
+You can show the extension panel by going to `Window` > `Extensions` > `OpenPype`.
+
+### Create
+
+When you have created an image you want to publish, you will need to create special groups or tag existing groups. To do this, open the `Creator` through the extension's `Create` button.
+
+![Creator](assets/photoshop_creator.PNG)
+
+With the `Creator` you have a variety of options to create:
+
+- Check `Use selection` (a dialog will ask whether you want to create one image per selected layer).
+  - Yes.
+    - No selection.
+      - This will create a single group named after the `Subset` in the `Creator`.
+    - Single selected layer.
+      - The selected layer will be grouped under a single group named after the selected layer.
+    - Single selected group.
+      - The selected group will be tagged for publishing.
+    - Multiple selected items.
+      - Each selected group will be tagged for publishing and each layer will be grouped individually.
+  - No.
+    - All selected layers will be grouped under a single group named after the `Subset` in the `Creator`.
+- Uncheck `Use selection`.
+  - This will create a single group named after the `Subset` in the `Creator`.
+
+### Publish
+
+When you are ready to share some work, you will need to publish. This is done by opening `Pyblish` through the extension's `Publish` button.
+
+![Publish](assets/photoshop_publish.PNG)
+
+This tool will run through checks to make sure the contents you are publishing are correct. Hit the "Play" button to start publishing.
+
+You may encounter issues with publishing, which will be indicated with red squares. If these issues are within the validation section, then you can fix them. If there are issues outside of the validation section, please let the OpenPype team know.
+
+#### Repair Validation Issues
+
+All validators will give some description of what the issue is. You can inspect this by going into the validator through the arrow:
+
+![Inspect](assets/photoshop_publish_inspect.PNG)
+
+You can expand the errors by clicking on them for more details:
+
+![Expand](assets/photoshop_publish_expand.PNG)
+
+Some validators have repair actions, which will fix the issue. You can identify validators with actions by the circle icon with an "A":
+
+![Actions](assets/photoshop_publish_actions.PNG)
+
+To access the actions, right click on the validator.
+If an action runs successfully, the actions icon will turn green. Once all issues are fixed, you can just hit the "Refresh" button and try to publish again.
+
+![Repair](assets/photoshop_publish_repair.gif)
+
+### Load
+
+When you want to load existing published work, you can load in smart layers through the `Loader`. You can reach the `Loader` through the extension's `Load` button.
+
+![Loader](assets/photoshop_loader.PNG)
+
+The supported families for Photoshop are:
+
+- `image`
+
+To load an image, right-click on the subset you want and choose a representation:
+
+![Loader](assets/photoshop_loader_load.gif)
+
+### Manage
+
+Now that we have some images loaded, we can manage which version is loaded. This is done through the `Scene Inventory`. You can reach it through the extension's `Manage` button.
+
+:::note
+Loaded images have to stay as smart layers in order to be updated. If you rasterize the layer, you cannot update it to a different version.
+:::
+
+![Loader](assets/photoshop_manage.PNG)
+
+You can switch to a previous version of the image or update to the latest.
+
+![Loader](assets/photoshop_manage_switch.gif)
+![Loader](assets/photoshop_manage_update.gif)
diff --git a/website/docs/artist_hosts_unreal.md b/website/docs/artist_hosts_unreal.md
new file mode 100644
index 0000000000..1c2c22937c
--- /dev/null
+++ b/website/docs/artist_hosts_unreal.md
@@ -0,0 +1,40 @@
+---
+id: artist_hosts_unreal
+title: Unreal
+sidebar_label: Unreal
+---
+
+## Introduction
+
+OpenPype supports Unreal in a similar way as the other DCCs, yet there are a few specifics you need to be aware of.
+
+### Project naming
+
+Unreal doesn't support project names starting with a non-alphabetic character, so names like `123_myProject` are
+invalid. If OpenPype detects such a name, it automatically prepends the letter **P** to make the name valid, so `123_myProject` will become `P123_myProject`. There is also a soft limit on project name length: it should be shorter than 20 characters. Longer names will trigger a warning in the Unreal Editor that there might be possible side effects. A sketch of these rules follows below.
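+
+As a rough illustration of those two rules (this is not OpenPype's actual
+code, just a sketch):
+
+```python
+# Sketch of the project-name rules described above (illustrative only).
+def sanitize_project_name(name: str) -> str:
+    if not name[:1].isalpha():
+        name = "P" + name  # prepend "P" so the name starts with a letter
+    if len(name) > 20:
+        # soft limit only - the Unreal Editor will show a warning
+        print(f"warning: '{name}' is longer than 20 characters")
+    return name
+
+assert sanitize_project_name("123_myProject") == "P123_myProject"
+```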
+
+## OpenPype global tools
+
+OpenPype global tools can be found in the *Window* main menu:
+
+![Unreal OpenPype Menu](assets/unreal-avalon_tools.jpg)
+
+- [Create](artist_tools.md#creator)
+- [Load](artist_tools.md#loader)
+- [Manage (Inventory)](artist_tools.md#inventory)
+- [Publish](artist_tools.md#publisher)
+- [Library Loader](artist_tools.md#library-loader)
+
+## Static Mesh
+
+### Loading
+
+To import a Static Mesh model, just choose **OpenPype β†’ Load ...** and select your mesh. Static meshes are transferred as FBX files as specified in the [Unreal Engine 4 Static Mesh Pipeline](https://docs.unrealengine.com/en-US/Engine/Content/Importing/FBX/StaticMeshes/index.html). This action will create a new folder with the subset name (`unrealStaticMeshMain_CON` for example) and put all data into it. Inside, you can find:
+
+![Unreal Container Content](assets/unreal-container.jpg)
+
+In this case there is **lambert1**, a material pulled from Maya when this static mesh was published, **unrealStaticMeshCube** is the geometry itself, and **unrealStaticMeshCube_CON** is an *AssetContainer* type asset that marks this directory as an Avalon Container (to track changes) and holds OpenPype metadata.
+
+### Publishing
+
+Publishing of a Static Mesh works in a similar way. Select your mesh in the *Content Browser* and choose **OpenPype β†’ Create ...**. This will create a folder named after the subset you've chosen - for example **unrealStaticMeshDefault_INS**. In this folder is that mesh and an *Avalon Publish Instance* asset marking the folder as a publishable instance and holding important metadata. If you want to publish this instance, go to **OpenPype β†’ Publish ...**
\ No newline at end of file
diff --git a/website/docs/artist_publish.md b/website/docs/artist_publish.md
new file mode 100644
index 0000000000..a7cb7a1bad
--- /dev/null
+++ b/website/docs/artist_publish.md
@@ -0,0 +1,184 @@
+---
+id: artist_publish
+title: Publishing
+sidebar_label: Publishing
+---
+
+## What is publishing?
+
+A process of exporting particular data from your work scene to be shared with others.
+
+Think of publishing as a checkpoint between two people, making sure that we catch mistakes as soon as possible and don't let them pass through a pipeline step that would otherwise eventually need to be repeated.
+
+Every time you want to share a piece of work with others (be it camera, model, textures, animation or whatever), you need to publish this data. The main reason is to save time down the line and make it very clear what can and cannot be used in production.
+This process should mostly be handled by publishing scripts, but in certain cases might have to be done manually.
+
+Published assets should comply with these rules:
+
+- Clearly named, based on internal naming conventions.
+- Versioned (with a master version created for certain types of assets).
+- Immediately usable, without any dependencies on unpublished assets or work files.
+- Immutable.
+
+All of these go into the publish folder for the given entity (shot, asset, sequence).
+
+:::note
+Keep in mind that while publishing the data might take you some extra time, it will save much more time in the long run when your colleagues don't need to dig through your work files trying to understand them and find that model you saved by hand.
+:::
+
+## Families
+
+Instances are categorized into 'families' based on what type of data they contain. Some instances might have multiple families if needed. A shot camera, for example, will have the families 'camera' and 'review' to indicate that it's going to be used for a review quicktime, but also exported into a file on disk.
+
+The following family definitions and requirements are OpenPype defaults and what we consider good industry practice, but most of the requirements can be easily altered to suit the studio or project needs.
+Here's a list of supported families:
+
+| Family                  | Comment                                          | Example Subsets           |
+| ----------------------- | ------------------------------------------------ | ------------------------- |
+| [Model](#model)         | Cleaned geo without materials                    | main, proxy, broken       |
+| [Look](#look)           | Package of shaders, assignments and textures     | main, wet, dirty          |
+| [Rig](#rig)             | Characters or props with animation controls      | main, deform, sim         |
+| [Assembly](#assembly)   | A complex model made from multiple other models. | main, deform, sim         |
+| [Layout](#layout)       | Simple representation of the environment         | main                      |
+| [Setdress](#setdress)   | Environment containing only referenced assets    | main                      |
+| [Camera](#camera)       | May contain trackers or proxy geo                | main, tracked, anim       |
+| [Animation](#animation) | Animation exported from a rig                    | characterA, vehicleB      |
+| [Cache](#cache)         | Arbitrary animated geometry or fx cache          | rest, ROM, pose01         |
+| MayaAscii               | Maya publishes that don't fit other categories   |                           |
+| [Render](#render)       | Rendered frames from CG or Comp                  |                           |
+| RenderSetup             | Scene render settings, AOVs and layers           |                           |
+| Plate                   | Ingested, transcoded, conformed footage          | raw, graded, imageplane   |
+| Write                   | Nuke write nodes for rendering                   |                           |
+| Image                   | Any non-plate image to be used by artists        | Reference, ConceptArt     |
+| LayeredImage            | Software agnostic layered image with metadata    | Reference, ConceptArt     |
+| Review                  | Reviewable video or image.                       |                           |
+| Matchmove               | Matchmoved camera, potentially with geometry     | main                      |
+| Workfile                | Backup of the workfile with all its content      | uses the task name        |
+| Nukenodes               | Any collection of nuke nodes                     | maskSetup, usefulBackdrop |
+| Yeticache               | Cached out yeti fur setup                        |                           |
+| YetiRig                 | Yeti groom ready to be applied to geometry cache | main, destroyed           |
+| VrayProxy               | Vray proxy geometry for rendering                |                           |
+| VrayScene               | Vray full scene export                           |                           |
+| ArnoldStandin           | All arnold .ass archives for rendering           | main, wet, dirty          |
+| LUT                     |                                                  |                           |
+| Gizmo                   |                                                  |                           |
+| Harmony.template        |                                                  |                           |
+| Harmony.pallette        |                                                  |                           |
+
+
+### Model
+
+Clean geometry without any material assignments. A published model can be as small as a single mesh, or as complex as a full building. That is purely up to the artist or the supervisor. Models can contain hierarchy defined by groups or nulls for better organisation.
+
+Apart from model subsets, we also support LODs as an extra level on top of the subset. To publish LODs, you just need to prepare subsets for publishing named `modelMySubsetName_LOD##`; if OpenPype finds `_LOD##` (hashes replaced with the LOD level), the subset will automatically be considered a LOD of the given subset, as the sketch below illustrates.
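+
+As a quick way to picture the naming convention, the following standalone Python snippet (illustrative only, not OpenPype's actual code) shows how a `_LOD##` suffix can be detected:
+
+```python
+import re
+
+# "modelMain_LOD01" -> LOD 1 of subset "modelMain"
+LOD_PATTERN = re.compile(r"^(?P<subset>.+)_LOD(?P<level>\d+)$")
+
+for name in ("modelMain", "modelMain_LOD01", "modelMain_LOD02"):
+    match = LOD_PATTERN.match(name)
+    if match:
+        print("{} is LOD {} of {}".format(
+            name, int(match.group("level")), match.group("subset")))
+    else:
+        print("{} is a regular subset".format(name))
+```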
+
+Example Subsets:
+`modelMain`, `modelProxy`, `modelSculpt`, `modelBroken`, `modelMain_LOD01`, `modelMain_LOD02`
+
+Example representations:
+`.ABC`, `.MA`, `.MB`, `.BLEND`, `.OBJ`, `.FBX`
+
+
+### Look
+
+A package of materials, shaders, assignments, textures and attributes that collectively define the look of a model for rendering or preview purposes. This can usually be applied only to the model it was authored for, or its corresponding cache; however, material sharing across multiple models is also possible. A look should be fully self-contained and ready for rendering.
+
+Example Subsets:
+`lookMain`, `lookProxy`, `lookWet`, `lookDirty`, `lookBlue`, `lookRed`
+
+Example Representations:
+`.MA + .JSON`, `.MTLX (yet unsupported)`, `.BLEND`
+
+Please note that a look is almost never a single representation, but a combination of multiple.
+For example in Maya a look consists of a `.ma` file with the shaders, a `.json` file which
+contains the attributes and assignments, and a `/resources` folder with all the required textures.
+
+
+### Rig
+
+Characters or props with animation controls or other parameters, ready to be referenced into a scene and animated. Animation rigs tend to be very software specific, but in general they consist of geometry, bones or joints, controllers and deformers. OpenPype in Maya supports both self-contained rigs that include everything in one file, and rigs that use nested references to bring in geometry or even the skeleton. By default we bake rigs into a single file during publishing, but that behaviour can be turned off to keep the nested references live in the animation scenes.
+
+Example Subsets:
+`rigMain`, `rigMocap`, `rigSim`, `rigCamera`, `rigMuscle`
+
+Example Representations:
+`.MA`, `.MB`, `.BLEND`, `.HDA`
+
+
+### Assembly
+
+A subset created by combining two or more smaller subsets into a composed bigger asset.
+A good example would be a restaurant table asset with the cutlery and chairs included,
+that will eventually be loaded into a restaurant set. Instead of loading each individual
+fork and knife for each table in the restaurant, we can first prepare an `assemblyRestaurantTable` subset
+which will contain the table itself, with cutlery, flowers, plates and chairs nicely arranged.
+
+This table can then be loaded multiple times into the restaurant for easier scene management
+and updates.
+
+An extracted assembly doesn't contain any geometry directly, but rather information about all the individual subsets that are inside the assembly, their versions and transformations. On top of that, an alembic is exported which only holds any extra transforms and groups that are needed to fully re-create the original assembled scene (see the data sketch below).
+
+An assembly can also be used as a sort of collection of elements that are often used together in shots. For example if we're set dressing lots of forest shots, it would make sense to make an assembly of all the forest elements for scattering, so we don't have to load them individually into each shot.
+
+Example Subsets:
+`assemblyTable`, `assemblyForestElements`, `assemblyRoof`
+
+Example Representations:
+`.ABC + .JSON`
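+
+As a mental model of what the `.JSON` side carries, the data might look something like the following (a hypothetical sketch of the contents described above, not OpenPype's actual schema):
+
+```python
+# Each entry records which subset is placed, at which version,
+# and the transformations of every placed copy.
+assembly_data = [
+    {
+        "subset": "modelTable",
+        "version": 3,
+        "transforms": [
+            {"translate": [0.0, 0.0, 0.0], "rotate": [0, 0, 0], "scale": [1, 1, 1]},
+        ],
+    },
+    {
+        "subset": "modelChair",
+        "version": 5,
+        "transforms": [
+            {"translate": [1.2, 0.0, 0.0], "rotate": [0, 90, 0], "scale": [1, 1, 1]},
+            {"translate": [-1.2, 0.0, 0.0], "rotate": [0, -90, 0], "scale": [1, 1, 1]},
+        ],
+    },
+]
+```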
+
+
+### Setdress
+
+A fully prepared environment scene assembled from other previously published assets. A setdress should be ready for rendering as is, including any instancing, material assignments and other complex setups the environment requires. Due to this complexity, setdress is currently only publishable in the native file format of the host where it was created. In Maya that would be a `.ma` or `.mb` file.
+
+
+### Camera
+
+A clean virtual camera without any proprietary rigging or host-specific information. Considering how widely published cameras are used across hosts in production, a published camera should ideally be as simple and clean as possible to ensure consistency when loaded into various hosts.
+
+
+Example Representations:
+`.MA`, `.ABC`
+
+
+### Cache
+
+Geometry or an effect with baked animation. A cache is usually exported as alembic,
+but can potentially be any other representation that makes sense in the given scenario.
+The cache is defined by the artist directly in the fx or animation scene.
+
+Example Subsets:
+`cacheRest`, `cacheROM`, `cachePose01`
+
+Example Representations:
+`.ABC`, `.VDB`, `.BGEO`
+
+
+### Animation
+
+The published result of an animation created with a rig. Animation can be extracted
+as animation curves, cached out geometry or even a fully animated rig with all the controllers.
+The animation cache is usually defined by a rigger in the rig file of a character or
+by an FX TD in the effects rig, to ensure consistency of outputs.
+
+Example Subsets:
+`animationBob_01`, `animationJack_02`, `animationVehicleA`
+
+Example Representations:
+`.MA`, `.ABC`, `.JSON`
+
+
+### Yeti Cache
+
+A cached out yeti fur simulation that originates from a yeti rig applied in the shot context.
+
+
+### Yeti Rig
+
+A yeti groom setup ready to be applied to a cached out character in the shot context.
+
+### Render
diff --git a/website/docs/artist_tools.md b/website/docs/artist_tools.md
new file mode 100644
index 0000000000..f03ea8e249
--- /dev/null
+++ b/website/docs/artist_tools.md
@@ -0,0 +1,414 @@
+---
+id: artist_tools
+title: Tools
+sidebar_label: Tools
+---
+
+## Set Context
+
+
+
+Any time your host app is open in a defined context, that context can be changed to a different hierarchy, asset or task within the project. This allows you to switch your open session to any other asset, shot or task within the same project, which is particularly useful in cases where your host takes a long time to start.
+
+
+ +![workfiles_1](assets/tools_context_manager.png) +
+
+
+
+:::note
+
+Notice that the window doesn't close after hitting `Accept` and confirming the change of context. This behaviour lets you keep the window open and change the context multiple times in a row.
+:::
+
+## Creator
+
+### Details
+
+Despite the name, Creator isn't for making new content in your scene, but rather for taking what's already in it and creating all the metadata your content needs to be published.
+
+In Maya this means creating a set with everything you want to publish and assigning custom attributes to it, so it gets picked up during the publishing stage (see the sketch below).
+
+In Nuke it's either converting an existing write node to a publishable one, or simply creating a write node with all the correct settings and outputs already set.
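+
+As a rough illustration of the Maya case, the snippet below builds such a tagged set by hand with `maya.cmds` (the attribute names here are illustrative, not necessarily the exact ones OpenPype writes):
+
+```python
+from maya import cmds
+
+
+def create_publish_set(name, family, subset):
+    """Collect the current selection into a set tagged for publishing."""
+    publish_set = cmds.sets(cmds.ls(selection=True), name=name)
+
+    # String attributes on the set are the metadata the publish
+    # process later looks for.
+    for attr, value in (("family", family), ("subset", subset)):
+        cmds.addAttr(publish_set, longName=attr, dataType="string")
+        cmds.setAttr("{}.{}".format(publish_set, attr), value, type="string")
+    return publish_set
+
+
+# e.g. create_publish_set("modelMain_SET", "model", "modelMain")
+```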
+
+
+### Usage
+
+1. Select what you want to publish from your scene
+2. Open *Creator* from the OpenPype menu
+3. Choose what family (data type) you need to export
+4. Type the name for your export. This name is how others are going to refer to this particular subset when loading it into their scenes. Every asset should have a Main subset, but can have any number of other variants.
+5. Click on *Create*
+
+* * *
+
+## Loader
+Loader loads published subsets into your current scene or script.
+
+### Usage
+1. Open *Loader* from the OpenPype menu
+2. Select the asset where the subset you want to load is published
+3. From the subset list select the subset you want
+4. Right-click the subset
+5. From the action menu select what you want to do *(load, reference, ...)*
+
+
+![tools_loader_1](assets/tools/tools_loader_1.png)
+
+
+### Refresh data
+Data is not auto-refreshed, to avoid database issues. To refresh assets or subsets, press the refresh button.
+
+
+ +![tools_loader_50](assets/tools/tools_loader_50.png) + +
+
+
+### Load another version
+The Loader loads the latest version by default, but you can of course load other versions. Double-click on the subset in the version column to expose the drop-down, choose the version you want to load and continue from point 4 of [Usage](#usage-1).
+
+
+ + ![tools_loader_21](assets/tools/tools_loader_21.png) +
+
+ + ![tools_loader_22](assets/tools/tools_loader_22.png) +
+
+
+
+### Filtering
+
+#### Filter Assets and Subsets by name
+To filter assets/subsets by name, just type the name or part of it into the filter text input. Only assets/subsets containing the entered string remain.
+
+- **Assets filtering example** *(it works the same for subsets)*:
+
+
+ +![tools_loader_4](assets/tools/tools_loader_4-small.png) + +
+
+ +![tools_loader_5](assets/tools/tools_loader_5-small.png) + +
+
+ + +#### Filter Subsets by Family + +
+
+
+To filter [subsets](artist_concepts#subset) by their [families](artist_publish#families), you can use the families list, where you check the families you want to see and uncheck the families you are not interested in.
+
+
+ +![tools_loader_30](assets/tools/tools_loader_30-small.png) + +
+
+
+
+
+### Subset groups
+Subsets may be grouped, which can help make the subset list more transparent. You can toggle the visibility of groups with the `Enable Grouping` checkbox.
+
+![tools_loader_40](assets/tools/tools_loader_40-small.png)
+
+
+#### Add to group or change current group
+You can set the group of selected subsets with the shortcut `Ctrl + G`.
+
+![tools_loader_41](assets/tools/tools_loader_41-small.png)
+
+
+:::warning
+You'll set the group in the Avalon database, so your changes will take effect for all users.
+:::
+
+Work in progress...
+
+## Library Loader
+
+The Library Loader is an extended [Loader](#loader) which allows loading published subsets from library projects. Controls are the same, but the Library Loader has an extra combo box that lets you choose the project you want to load from.
+
+
+ +![tools_library_1](assets/tools/tools_library_1-small.png) + +
+
+ +![tools_library_2](assets/tools/tools_library_2-small.png) + +
+
+
+* * *
+
+## Publisher
+
+> Use publish to share your work with others. It collects, validates and exports the data in a standardized way.
+
+### Details
+
+When you run pyblish, the UI is made of 2 main parts. On the left, you see all the items pyblish will be working with (called instances), and on the right a list of actions that are going to process these items.
+Even though every task type has pre-defined settings for what should be collected from the scene and what items will be published by default, you can technically publish any output type from any task type.
+Each item is passed through multiple plugins, each doing a small piece of work. These are organized into 4 areas and run in sequence; a small code sketch follows the stage descriptions below.
+
+### Using Pyblish
+
+In the best case scenario, you open pyblish from the Avalon menu, press play, wait for it to finish, and you're done.
+These are the steps in detail, for cases where the default settings don't work for you or you know that the task you're working on requires a different treatment.
+
+#### Collect
+
+Finds all the important data in the scene and makes it ready for publishing.
+
+#### Validate
+
+Each validator makes sure your output complies with one particular condition. This could be anything from naming conventions and scene settings to plugin usage. An item can only be published if all validators pass.
+
+#### Extract
+
+An extractor takes the item and saves it to disk, usually to a temporary location. Each extractor represents one file format, and there can be multiple file formats exported for each item.
+
+#### Integrate
+
+The integrator takes the extracted files, categorizes them and moves them to the correct location on the disk or on the server.
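+
+To give a feel for how these four stages are implemented, here is a minimal, hypothetical pyblish validator plugin (the class name and the checked condition are invented for the example):
+
+```python
+import pyblish.api
+
+
+class ValidateInstanceHasSubset(pyblish.api.InstancePlugin):
+    """Hypothetical validator: every instance must carry a subset name."""
+
+    # Ordering constants place a plugin in its stage:
+    # CollectorOrder -> ValidatorOrder -> ExtractorOrder -> IntegratorOrder
+    order = pyblish.api.ValidatorOrder
+    label = "Validate Instance Has Subset"
+
+    def process(self, instance):
+        # Raising here fails validation and blocks extraction/integration
+        if not instance.data.get("subset"):
+            raise ValueError(
+                "Instance {} has no subset name".format(
+                    instance.data.get("name")))
+```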
+
+* * *
+
+## Inventory
+
+With Scene Inventory, you can browse, update and change subsets loaded with the [Loader](#loader) into your scene or script.
+
+:::note
+You should first understand the [Key concepts](artist_concepts#) to make full use of this tool.
+:::
+
+### Details
+
+Once a subset is loaded, it turns into a container within the scene. This containerization gives us a good overview of everything in the scene, but also makes it possible to change versions, notify the user if something is outdated, replace one asset with another, etc.
+
+The scene inventory has a simple GUI focused on efficiency. You can see everything that has been previously loaded into the scene, how many times it's been loaded, what version, and a lot of other information. Loaded assets are grouped by their asset name, subset name and representation. This grouping gives the ability to apply changes to all instances of the loaded asset *(e.g. when a __tree__ is loaded 20 times, you can easily update the version for all of them)*.
+
+![tools_scene_inventory_10](assets/tools/tools_scene_inventory_10-small.png)
+
+To interact with any container, right-click it and you'll see a drop-down with possible actions. The key actions for production are already implemented, but more will be added over time.
+
+![tools_scene_inventory_20](assets/tools/tools_scene_inventory_20.png)
+
+### Usage
+
+#### Change version
+You can change versions of loaded subsets with the Scene Inventory tool. The version of a loaded asset is colored red when a newer version is available.
+
+
+![tools_scene_inventory_40](assets/tools/tools_scene_inventory_40.png)
+
+##### Update to the latest version
+Select the containers or subsets you want to update, right-click the selection and press `Update to latest`.
+
+##### Change to specific version
+Select the containers or subsets you want to change, right-click the selection, press `Set version`, select the version you want from the dropdown and press the `OK` button to confirm.
+
+
+![tools_scene_inventory_30](assets/tools/tools_scene_inventory_30.png)
+
+
+#### Switch Asset
+A tool within the Scene Inventory that gives you the ability to switch the asset, subset and representation of loaded assets.
+
+
+![tools_scene_inventory_50](assets/tools/tools_scene_inventory_50.png)
+
+
+Because a loaded asset is in fact a representation of a version published in an asset's subset, it is possible to switch each of these parts *(representation, version, subset and asset)*, but with limitations. The limitations are natural: when you have loaded the `.ma` representation of the `modelMain` subset from the `car` asset, it is not possible to switch the subset to `modelHD` and keep the same representation if `modelHD` does not have a published `.ma` representation. It is possible to switch multiple loaded assets at once, which makes this tool a very powerful helper when all published assets contain the same subsets and representations.
+
+The Switch tool won't let you cross these limitations, and informs you when you have to be more specific because an impossible combination occurred *(it is also possible that there is no valid combination for the selected assets)*. The border is colored red and the confirm button is disabled when further specification is required.
+
+
+![tools_scene_inventory_55](assets/tools/tools_scene_inventory_55.png)
+
+
+Possible switches (see the sketch after this list):
+- switch **representation** (`.ma` to `.abc`, `.exr` to `.dpx`, etc.)
+- switch **subset** (`modelMain` to `modelHD`, etc.)
+  - `AND` keep the same **representation** *(with limitations)*
+  - `AND` switch the **representation** *(with limitations)*
+- switch **asset** (`oak` to `elm`, etc.)
+  - `AND` keep the same **subset** and **representation** *(with limitations)*
+  - `AND` keep the same **subset** and switch the **representation** *(with limitations)*
+  - `AND` switch the **subset** and keep the same **representation** *(with limitations)*
+  - `AND` switch both the **subset** and **representation** *(with limitations)*
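+
+The rule behind all the *"with limitations"* notes boils down to a lookup: a switch is only valid when the target combination actually exists among the published data. A toy sketch (the data layout here is invented purely for illustration):
+
+```python
+# asset -> subset -> set of published representations
+published = {
+    "car": {"modelMain": {".ma", ".abc"}, "modelHD": {".abc"}},
+    "oak": {"modelMain": {".ma", ".abc"}},
+}
+
+
+def can_switch(asset, subset, representation):
+    """A switch is valid only if the exact combination was published."""
+    return representation in published.get(asset, {}).get(subset, set())
+
+
+print(can_switch("car", "modelHD", ".abc"))  # True
+print(can_switch("car", "modelHD", ".ma"))   # False: must switch representation too
+```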
+
+
+We added one more switch layer above subset for LODs (Levels of Detail). This requires published subsets with names ending in **"_LOD{number}"**, where the number represents the level (e.g. `modelMain_LOD1`). It has the same limitations as mentioned above. This is handy when you want to change only the subset but keep the same LOD, or keep the same subset but change the LOD, for multiple assets. This option is hidden if you didn't select a subset that has published LOD subsets.
+
+![tools_scene_inventory_54](assets/tools/tools_scene_inventory_54.png)
+
+### Filtering
+
+#### Filter by name
+
+There is a search bar at the top for cases when you have a complex scene with many assets and need to find a specific one.
+
+
+![tools_scene_inventory_60](assets/tools/tools_scene_inventory_60-small.png)
+
+
+ +![tools_scene_inventory_61](assets/tools/tools_scene_inventory_61-small.png) + +
+
+ + +#### Filter with Cherry-pick selection + +
+
+
+To keep only the selected subsets, right-click the selection and press `Cherry-Pick (Hierarchy)` *(the border of the subset list changes to **orange** when Cherry-pick filtering is set, so you know a filter is applied)*.
+
+
+ +![tools_scene_inventory_62-small](assets/tools/tools_scene_inventory_62-small.png) + +
+
+ +
+
+
+To return to the original state, right-click anywhere in the subset list and press `Back to Full-View`.
+
+
+ +![tools_scene_inventory_63-small](assets/tools/tools_scene_inventory_63-small.png) + +
+
+
+
+:::tip
+You can Cherry-pick from Cherry-picked subsets.
+:::
+
+* * *
+
+## Workfiles
+
+Save new working scenes or scripts, or open the ones you previously worked on.
+
+### Details
+
+Instead of digging through your software's native file browser, you can simply open the Workfiles app and see all the files for the asset or shot you're currently working with. The app takes care of all the naming and the location of your work files.
+
+When saving a scene you can also add a comment. It is completely up to you how you use this; however, we recommend using it for subversions within your current working version.
+
+Let's say that the last version of the comp you published was v003 and you're now working on the file `prj_sh010_compositing_v004.nk`. If you want to keep snapshots of your work, but not iterate on the main version because the supervisor is expecting the next publish to be v004, you can use the comment for this and save the file under the names `prj_sh010_compositing_v004_001`, `prj_sh010_compositing_v004_002`, and so on. The main version is automatically iterated every time you publish something, as the naming sketch below illustrates.
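+
+A minimal sketch of this naming scheme (the actual template is configurable per studio; the function below is purely illustrative):
+
+```python
+def workfile_name(project, asset, task, version, comment=None, ext="nk"):
+    """Compose a name like 'prj_sh010_compositing_v004_001.nk'."""
+    name = "{}_{}_{}_v{:03d}".format(project, asset, task, version)
+    if comment:
+        name += "_{}".format(comment)
+    return "{}.{}".format(name, ext)
+
+
+print(workfile_name("prj", "sh010", "compositing", 4))
+# -> prj_sh010_compositing_v004.nk
+print(workfile_name("prj", "sh010", "compositing", 4, comment="001"))
+# -> prj_sh010_compositing_v004_001.nk
+```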
+
+
+### Usage
+
+#### To open an existing file:
+
+1. Open the Workfiles tool from the OpenPype menu
+2. Select a file from the list - the latest version is the highest *(descending order)*
+3. Press the `Open` button
+
+
+ +![workfiles_1](assets/workfiles_1.png) + +
+
+
+
+#### To save a new workfile
+1. Open the Workfiles tool from the OpenPype menu
+2. Press the `Save As` button
+3. You can add an optional comment to the filename, which will be appended at the end
+4. Press `OK`
+
+:::note
+You can manually override the workfile version by unticking the next available version and using the version menu to choose your own.
+:::
+
+## Look Assigner
+
+> The Look Assigner takes care of assigning published looks to the correct model in the scene.
+
+### Details
+
+When a look is published, it stores the information about which shading networks need to be assigned to which models, and it also stores all the render attributes on the mesh necessary for a successful render.
+
+### Usage
+
+The Look Assigner GUI is made of two parts. On the left you will see the list of all the available models in the scene, and on the right side all the looks that can be associated with them. To assign a look to a model you just need to:
+
+1. Click on "load all subsets"
+2. Choose a subset from the menu on the left
+3. Right-click on a look from the list on the right
+4. Choose "Assign"
+
+At this point you should have a model with all its shaders applied correctly. The tool automatically loads the latest look available.
+
+
+## Subset Manager
+
+> Subset Manager lists all items which are meant for publishing and will be published if Publish is triggered.
+
+### Details
+
+One or more items (instances) can be published any time the Publish process is started. Each publishable
+item must have been created with the Creator tool beforehand. Subset Manager provides an easy way to check which items,
+and how many, will be published.
+
+It also provides a clean and preferable way to remove unwanted items from publishing.
+
+### Usage
+
+The Subset Manager GUI is made of two parts. On the left you will see the list of all the available publishable items in the scene, and on the right side details about these items.
+
+ +![subset_manager](assets/tools_subset_manager.png) +
+
+Any time a new item is created, it will show up here.
+
+Currently there is only a single action, 'Remove instance', which cleans the workfile of the publishable item's metadata.
+This might not remove the underlying host item; it depends on the host and the implementation!
+
+It might also happen that a user deletes the underlying host item (for example a layer in Photoshop) directly in the host, but the metadata will stay.
+This could result in phantom issues during publishing. Use Subset Manager to purge the workfile of abandoned items.
+
+Please check the behaviour in the host of your choice.
diff --git a/website/docs/artist_work.md b/website/docs/artist_work.md
new file mode 100644
index 0000000000..de2290fc0c
--- /dev/null
+++ b/website/docs/artist_work.md
@@ -0,0 +1,23 @@
+---
+id: artist_work
+title: Working on tasks
+sidebar_label: Working
+---
+
+Check the [documentation](https://docusaurus.io) for how to use Docusaurus.
+
+## Lorem
+
+Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus elementum massa eget nulla aliquet sagittis. Proin odio tortor, vulputate ut odio in, ultrices ultricies augue. Cras ornare ultrices lorem malesuada iaculis. Etiam sit amet libero tempor, pulvinar mauris sed, sollicitudin sapien.
+
+## Mauris In Code
+
+ Mauris vestibulum ullamcorper nibh, ut semper purus pulvinar ut. Donec volutpat orci sit amet mauris malesuada, non pulvinar augue aliquam. Vestibulum ultricies at urna ut suscipit. Morbi iaculis, erat at imperdiet semper, ipsum nulla sodales erat, eget tincidunt justo dui quis justo. Pellentesque dictum bibendum diam at aliquet. Sed pulvinar, dolor quis finibus ornare, eros odio facilisis erat, eu rhoncus nunc dui sed ex. Nunc gravida dui massa, sed ornare arcu tincidunt sit amet. Maecenas efficitur sapien neque, a laoreet libero feugiat ut.
+
+## Nulla
+
+Nulla facilisi. Maecenas sodales nec purus eget posuere. Sed sapien quam, pretium a risus in, porttitor dapibus erat. Sed sit amet fringilla ipsum, eget iaculis augue. Integer sollicitudin tortor quis ultricies aliquam. Suspendisse fringilla nunc in tellus cursus, at placerat tellus scelerisque. Sed tempus elit a sollicitudin rhoncus. Nulla facilisi. Morbi nec dolor dolor. Orci varius natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Cras et aliquet lectus. Pellentesque sit amet eros nisi. Quisque ac sapien in sapien congue accumsan. Nullam in posuere ante. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Proin lacinia leo a nibh fringilla pharetra.
+
+## Orci
+
+Orci varius natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Proin venenatis lectus dui, vel ultrices ante bibendum hendrerit. Aenean egestas feugiat dui id hendrerit. Orci varius natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Curabitur in tellus laoreet, eleifend nunc id, viverra leo. Proin vulputate non dolor vel vulputate. Curabitur pretium lobortis felis, sit amet finibus lorem suscipit ut. Sed non mollis risus. Duis sagittis, mi in euismod tincidunt, nunc mauris vestibulum urna, at euismod est elit quis erat. Phasellus accumsan vitae neque eu placerat. In elementum arcu nec tellus imperdiet, eget maximus nulla sodales. Curabitur eu sapien eget nisl sodales fermentum.
diff --git a/website/docs/assets/aftereffects_creator.png b/website/docs/assets/aftereffects_creator.png new file mode 100644 index 0000000000..203ecb6fb7 Binary files /dev/null and b/website/docs/assets/aftereffects_creator.png differ diff --git a/website/docs/assets/aftereffects_creator_after.png b/website/docs/assets/aftereffects_creator_after.png new file mode 100644 index 0000000000..3933202f71 Binary files /dev/null and b/website/docs/assets/aftereffects_creator_after.png differ diff --git a/website/docs/assets/aftereffects_loader.png b/website/docs/assets/aftereffects_loader.png new file mode 100644 index 0000000000..9fd69b04f7 Binary files /dev/null and b/website/docs/assets/aftereffects_loader.png differ diff --git a/website/docs/assets/aftereffects_publish.png b/website/docs/assets/aftereffects_publish.png new file mode 100644 index 0000000000..4cfe3d1d6b Binary files /dev/null and b/website/docs/assets/aftereffects_publish.png differ diff --git a/website/docs/assets/blender-model_create_instance.jpg b/website/docs/assets/blender-model_create_instance.jpg new file mode 100644 index 0000000000..d0891c5d05 Binary files /dev/null and b/website/docs/assets/blender-model_create_instance.jpg differ diff --git a/website/docs/assets/blender-model_error_details.jpg b/website/docs/assets/blender-model_error_details.jpg new file mode 100644 index 0000000000..1756254e5f Binary files /dev/null and b/website/docs/assets/blender-model_error_details.jpg differ diff --git a/website/docs/assets/blender-model_example.jpg b/website/docs/assets/blender-model_example.jpg new file mode 100644 index 0000000000..98d98e903f Binary files /dev/null and b/website/docs/assets/blender-model_example.jpg differ diff --git a/website/docs/assets/blender-model_pre_publish.jpg b/website/docs/assets/blender-model_pre_publish.jpg new file mode 100644 index 0000000000..11233229c5 Binary files /dev/null and b/website/docs/assets/blender-model_pre_publish.jpg differ diff --git a/website/docs/assets/blender-model_publish_error.jpg b/website/docs/assets/blender-model_publish_error.jpg new file mode 100644 index 0000000000..260d9b9996 Binary files /dev/null and b/website/docs/assets/blender-model_publish_error.jpg differ diff --git a/website/docs/assets/blender-rig_create.jpg b/website/docs/assets/blender-rig_create.jpg new file mode 100644 index 0000000000..169ddae84f Binary files /dev/null and b/website/docs/assets/blender-rig_create.jpg differ diff --git a/website/docs/assets/blender-rig_hierarchy_before_publish.jpg b/website/docs/assets/blender-rig_hierarchy_before_publish.jpg new file mode 100644 index 0000000000..81f3916c9e Binary files /dev/null and b/website/docs/assets/blender-rig_hierarchy_before_publish.jpg differ diff --git a/website/docs/assets/blender-rig_hierarchy_example.jpg b/website/docs/assets/blender-rig_hierarchy_example.jpg new file mode 100644 index 0000000000..6ab6897650 Binary files /dev/null and b/website/docs/assets/blender-rig_hierarchy_example.jpg differ diff --git a/website/docs/assets/blender-rig_model_setup.jpg b/website/docs/assets/blender-rig_model_setup.jpg new file mode 100644 index 0000000000..6f967cdab4 Binary files /dev/null and b/website/docs/assets/blender-rig_model_setup.jpg differ diff --git a/website/docs/assets/blender-save_modelling_file.jpg b/website/docs/assets/blender-save_modelling_file.jpg new file mode 100644 index 0000000000..d7f2401c51 Binary files /dev/null and b/website/docs/assets/blender-save_modelling_file.jpg differ diff --git 
a/website/docs/assets/deadline_configure_event.png b/website/docs/assets/deadline_configure_event.png new file mode 100644 index 0000000000..537a18506c Binary files /dev/null and b/website/docs/assets/deadline_configure_event.png differ diff --git a/website/docs/assets/deadline_configure_plugin.png b/website/docs/assets/deadline_configure_plugin.png new file mode 100644 index 0000000000..f734ca7aca Binary files /dev/null and b/website/docs/assets/deadline_configure_plugin.png differ diff --git a/website/docs/assets/deadline_fail.png b/website/docs/assets/deadline_fail.png new file mode 100644 index 0000000000..7758e35b01 Binary files /dev/null and b/website/docs/assets/deadline_fail.png differ diff --git a/website/docs/assets/ffmpeg_path.png b/website/docs/assets/ffmpeg_path.png new file mode 100644 index 0000000000..6bc99cc2ca Binary files /dev/null and b/website/docs/assets/ffmpeg_path.png differ diff --git a/website/docs/assets/ftrack/ftrack-api.gif b/website/docs/assets/ftrack/ftrack-api.gif new file mode 100644 index 0000000000..b2caf6ca98 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-api.gif differ diff --git a/website/docs/assets/ftrack/ftrack-api.png b/website/docs/assets/ftrack/ftrack-api.png new file mode 100644 index 0000000000..c69967d78e Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-api.png differ diff --git a/website/docs/assets/ftrack/ftrack-api2.png b/website/docs/assets/ftrack/ftrack-api2.png new file mode 100644 index 0000000000..1653f8f603 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-api2.png differ diff --git a/website/docs/assets/ftrack/ftrack-api3.png b/website/docs/assets/ftrack/ftrack-api3.png new file mode 100644 index 0000000000..0ea03f5224 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-api3.png differ diff --git a/website/docs/assets/ftrack/ftrack-api4.png b/website/docs/assets/ftrack/ftrack-api4.png new file mode 100644 index 0000000000..3ee7551dce Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-api4.png differ diff --git a/website/docs/assets/ftrack/ftrack-component_open-icon.png b/website/docs/assets/ftrack/ftrack-component_open-icon.png new file mode 100644 index 0000000000..1ea7a97d4c Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-component_open-icon.png differ diff --git a/website/docs/assets/ftrack/ftrack-component_open_1-small.png b/website/docs/assets/ftrack/ftrack-component_open_1-small.png new file mode 100644 index 0000000000..6a783ab973 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-component_open_1-small.png differ diff --git a/website/docs/assets/ftrack/ftrack-component_open_1.png b/website/docs/assets/ftrack/ftrack-component_open_1.png new file mode 100644 index 0000000000..5fdf16a0a2 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-component_open_1.png differ diff --git a/website/docs/assets/ftrack/ftrack-create_folders-icon.png b/website/docs/assets/ftrack/ftrack-create_folders-icon.png new file mode 100644 index 0000000000..be88cf8993 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-create_folders-icon.png differ diff --git a/website/docs/assets/ftrack/ftrack-create_project_folders-icon.png b/website/docs/assets/ftrack/ftrack-create_project_folders-icon.png new file mode 100644 index 0000000000..06c689267a Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-create_project_folders-icon.png differ diff --git a/website/docs/assets/ftrack/ftrack-custom_attrib_apps.jpg 
b/website/docs/assets/ftrack/ftrack-custom_attrib_apps.jpg new file mode 100644 index 0000000000..bbd4fd9842 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-custom_attrib_apps.jpg differ diff --git a/website/docs/assets/ftrack/ftrack-delete_asset-icon.png b/website/docs/assets/ftrack/ftrack-delete_asset-icon.png new file mode 100644 index 0000000000..2786ece2d0 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-delete_asset-icon.png differ diff --git a/website/docs/assets/ftrack/ftrack-delivery-icon.png b/website/docs/assets/ftrack/ftrack-delivery-icon.png new file mode 100644 index 0000000000..30775c9a50 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-delivery-icon.png differ diff --git a/website/docs/assets/ftrack/ftrack-login-api.png b/website/docs/assets/ftrack/ftrack-login-api.png new file mode 100644 index 0000000000..d47d46ce2f Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-login-api.png differ diff --git a/website/docs/assets/ftrack/ftrack-login_1.png b/website/docs/assets/ftrack/ftrack-login_1.png new file mode 100644 index 0000000000..cc9b51b39b Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-login_1.png differ diff --git a/website/docs/assets/ftrack/ftrack-login_2.png b/website/docs/assets/ftrack/ftrack-login_2.png new file mode 100644 index 0000000000..e654f70283 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-login_2.png differ diff --git a/website/docs/assets/ftrack/ftrack-login_3.png b/website/docs/assets/ftrack/ftrack-login_3.png new file mode 100644 index 0000000000..1677ea7f57 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-login_3.png differ diff --git a/website/docs/assets/ftrack/ftrack-login_50.png b/website/docs/assets/ftrack/ftrack-login_50.png new file mode 100644 index 0000000000..46d1b3c686 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-login_50.png differ diff --git a/website/docs/assets/ftrack/ftrack-login_60.png b/website/docs/assets/ftrack/ftrack-login_60.png new file mode 100644 index 0000000000..8302c198c3 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-login_60.png differ diff --git a/website/docs/assets/ftrack/ftrack-login_71-small.png b/website/docs/assets/ftrack/ftrack-login_71-small.png new file mode 100644 index 0000000000..0b66ed520d Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-login_71-small.png differ diff --git a/website/docs/assets/ftrack/ftrack-login_71.png b/website/docs/assets/ftrack/ftrack-login_71.png new file mode 100644 index 0000000000..2331dcb4c2 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-login_71.png differ diff --git a/website/docs/assets/ftrack/ftrack-login_80-small.png b/website/docs/assets/ftrack/ftrack-login_80-small.png new file mode 100644 index 0000000000..6473e2bee6 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-login_80-small.png differ diff --git a/website/docs/assets/ftrack/ftrack-login_80.png b/website/docs/assets/ftrack/ftrack-login_80.png new file mode 100644 index 0000000000..67aecd1e84 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-login_80.png differ diff --git a/website/docs/assets/ftrack/ftrack-login_81.png b/website/docs/assets/ftrack/ftrack-login_81.png new file mode 100644 index 0000000000..ae145d6c73 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-login_81.png differ diff --git a/website/docs/assets/ftrack/ftrack-multiple_notes-icon.png b/website/docs/assets/ftrack/ftrack-multiple_notes-icon.png new 
file mode 100644 index 0000000000..3ebc240df7 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-multiple_notes-icon.png differ diff --git a/website/docs/assets/ftrack/ftrack-multiple_notes_1-small.png b/website/docs/assets/ftrack/ftrack-multiple_notes_1-small.png new file mode 100644 index 0000000000..d4f7bd865e Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-multiple_notes_1-small.png differ diff --git a/website/docs/assets/ftrack/ftrack-multiple_notes_1.png b/website/docs/assets/ftrack/ftrack-multiple_notes_1.png new file mode 100644 index 0000000000..401f02e1ce Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-multiple_notes_1.png differ diff --git a/website/docs/assets/ftrack/ftrack-prepare_project-icon.png b/website/docs/assets/ftrack/ftrack-prepare_project-icon.png new file mode 100644 index 0000000000..44facfa3ae Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-prepare_project-icon.png differ diff --git a/website/docs/assets/ftrack/ftrack-prepare_project_1-small.png b/website/docs/assets/ftrack/ftrack-prepare_project_1-small.png new file mode 100644 index 0000000000..c2d1066ffd Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-prepare_project_1-small.png differ diff --git a/website/docs/assets/ftrack/ftrack-prepare_project_1.png b/website/docs/assets/ftrack/ftrack-prepare_project_1.png new file mode 100644 index 0000000000..488ea5710d Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-prepare_project_1.png differ diff --git a/website/docs/assets/ftrack/ftrack-pype_admin-icon.png b/website/docs/assets/ftrack/ftrack-pype_admin-icon.png new file mode 100644 index 0000000000..f8ccbe66db Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-pype_admin-icon.png differ diff --git a/website/docs/assets/ftrack/ftrack-pype_doctor-icon.png b/website/docs/assets/ftrack/ftrack-pype_doctor-icon.png new file mode 100644 index 0000000000..23c408d373 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-pype_doctor-icon.png differ diff --git a/website/docs/assets/ftrack/ftrack-thumbnail-icon.png b/website/docs/assets/ftrack/ftrack-thumbnail-icon.png new file mode 100644 index 0000000000..f27cd96a3b Binary files /dev/null and b/website/docs/assets/ftrack/ftrack-thumbnail-icon.png differ diff --git a/website/docs/assets/ftrack/ftrack_logout.gif b/website/docs/assets/ftrack/ftrack_logout.gif new file mode 100644 index 0000000000..81088075c4 Binary files /dev/null and b/website/docs/assets/ftrack/ftrack_logout.gif differ diff --git a/website/docs/assets/harmony_creator.PNG b/website/docs/assets/harmony_creator.PNG new file mode 100644 index 0000000000..8593889c64 Binary files /dev/null and b/website/docs/assets/harmony_creator.PNG differ diff --git a/website/docs/assets/hiero_createUIFrames.png b/website/docs/assets/hiero_createUIFrames.png new file mode 100644 index 0000000000..798b3efb79 Binary files /dev/null and b/website/docs/assets/hiero_createUIFrames.png differ diff --git a/website/docs/assets/hiero_createUIRename.png b/website/docs/assets/hiero_createUIRename.png new file mode 100644 index 0000000000..3c02254559 Binary files /dev/null and b/website/docs/assets/hiero_createUIRename.png differ diff --git a/website/docs/assets/hiero_defaultTags.png b/website/docs/assets/hiero_defaultTags.png new file mode 100644 index 0000000000..225ec7d484 Binary files /dev/null and b/website/docs/assets/hiero_defaultTags.png differ diff --git a/website/docs/assets/hiero_instanceCreator.png 
b/website/docs/assets/hiero_instanceCreator.png new file mode 100644 index 0000000000..bcda6cdd18 Binary files /dev/null and b/website/docs/assets/hiero_instanceCreator.png differ diff --git a/website/docs/assets/hiero_menuColorspaceClip.png b/website/docs/assets/hiero_menuColorspaceClip.png new file mode 100644 index 0000000000..4014da2675 Binary files /dev/null and b/website/docs/assets/hiero_menuColorspaceClip.png differ diff --git a/website/docs/assets/hiero_menuColorspaceProject.png b/website/docs/assets/hiero_menuColorspaceProject.png new file mode 100644 index 0000000000..6b8e6e1b89 Binary files /dev/null and b/website/docs/assets/hiero_menuColorspaceProject.png differ diff --git a/website/docs/assets/hiero_menuCreate.png b/website/docs/assets/hiero_menuCreate.png new file mode 100644 index 0000000000..cbd816a203 Binary files /dev/null and b/website/docs/assets/hiero_menuCreate.png differ diff --git a/website/docs/assets/hiero_menuDefaultTags.png b/website/docs/assets/hiero_menuDefaultTags.png new file mode 100644 index 0000000000..ba542e1019 Binary files /dev/null and b/website/docs/assets/hiero_menuDefaultTags.png differ diff --git a/website/docs/assets/hiero_menuPublish.png b/website/docs/assets/hiero_menuPublish.png new file mode 100644 index 0000000000..4259dc44e4 Binary files /dev/null and b/website/docs/assets/hiero_menuPublish.png differ diff --git a/website/docs/assets/hiero_tagHandles.png b/website/docs/assets/hiero_tagHandles.png new file mode 100644 index 0000000000..84e7012088 Binary files /dev/null and b/website/docs/assets/hiero_tagHandles.png differ diff --git a/website/docs/assets/hiero_timelinePrep.png b/website/docs/assets/hiero_timelinePrep.png new file mode 100644 index 0000000000..253bc114ee Binary files /dev/null and b/website/docs/assets/hiero_timelinePrep.png differ diff --git a/website/docs/assets/maya-create_rs_proxy.jpg b/website/docs/assets/maya-create_rs_proxy.jpg new file mode 100644 index 0000000000..37680e6707 Binary files /dev/null and b/website/docs/assets/maya-create_rs_proxy.jpg differ diff --git a/website/docs/assets/maya-look_dev-look_manager.jpg b/website/docs/assets/maya-look_dev-look_manager.jpg new file mode 100644 index 0000000000..54dbf3f667 Binary files /dev/null and b/website/docs/assets/maya-look_dev-look_manager.jpg differ diff --git a/website/docs/assets/maya-look_dev-red_buddha.jpg b/website/docs/assets/maya-look_dev-red_buddha.jpg new file mode 100644 index 0000000000..e003a4ca55 Binary files /dev/null and b/website/docs/assets/maya-look_dev-red_buddha.jpg differ diff --git a/website/docs/assets/maya-model_create_instance.jpg b/website/docs/assets/maya-model_create_instance.jpg new file mode 100644 index 0000000000..0f32f0cb05 Binary files /dev/null and b/website/docs/assets/maya-model_create_instance.jpg differ diff --git a/website/docs/assets/maya-model_freeze_error_details.jpg b/website/docs/assets/maya-model_freeze_error_details.jpg new file mode 100644 index 0000000000..77b3cab005 Binary files /dev/null and b/website/docs/assets/maya-model_freeze_error_details.jpg differ diff --git a/website/docs/assets/maya-model_hierarchy_example.jpg b/website/docs/assets/maya-model_hierarchy_example.jpg new file mode 100644 index 0000000000..661722960c Binary files /dev/null and b/website/docs/assets/maya-model_hierarchy_example.jpg differ diff --git a/website/docs/assets/maya-model_loading.jpg b/website/docs/assets/maya-model_loading.jpg new file mode 100644 index 0000000000..7a7d911c06 Binary files /dev/null and 
b/website/docs/assets/maya-model_loading.jpg differ diff --git a/website/docs/assets/maya-model_pre_publish.jpg b/website/docs/assets/maya-model_pre_publish.jpg new file mode 100644 index 0000000000..b5827bdb5e Binary files /dev/null and b/website/docs/assets/maya-model_pre_publish.jpg differ diff --git a/website/docs/assets/maya-model_publish_error.jpg b/website/docs/assets/maya-model_publish_error.jpg new file mode 100644 index 0000000000..efe2aff9ba Binary files /dev/null and b/website/docs/assets/maya-model_publish_error.jpg differ diff --git a/website/docs/assets/maya-model_review_setup.jpg b/website/docs/assets/maya-model_review_setup.jpg new file mode 100644 index 0000000000..6c43807596 Binary files /dev/null and b/website/docs/assets/maya-model_review_setup.jpg differ diff --git a/website/docs/assets/maya-pointcache_loaded.png b/website/docs/assets/maya-pointcache_loaded.png new file mode 100644 index 0000000000..398ef5b7ac Binary files /dev/null and b/website/docs/assets/maya-pointcache_loaded.png differ diff --git a/website/docs/assets/maya-pointcache_setup.png b/website/docs/assets/maya-pointcache_setup.png new file mode 100644 index 0000000000..8904baa239 Binary files /dev/null and b/website/docs/assets/maya-pointcache_setup.png differ diff --git a/website/docs/assets/maya-render_publish_detail1.jpg b/website/docs/assets/maya-render_publish_detail1.jpg new file mode 100644 index 0000000000..bfb00ca98a Binary files /dev/null and b/website/docs/assets/maya-render_publish_detail1.jpg differ diff --git a/website/docs/assets/maya-render_setup.jpg b/website/docs/assets/maya-render_setup.jpg new file mode 100644 index 0000000000..4b1c52a593 Binary files /dev/null and b/website/docs/assets/maya-render_setup.jpg differ diff --git a/website/docs/assets/maya-render_setup_window.jpg b/website/docs/assets/maya-render_setup_window.jpg new file mode 100644 index 0000000000..9f4dce1ca9 Binary files /dev/null and b/website/docs/assets/maya-render_setup_window.jpg differ diff --git a/website/docs/assets/maya-renderglobals.jpg b/website/docs/assets/maya-renderglobals.jpg new file mode 100644 index 0000000000..8b4253f23b Binary files /dev/null and b/website/docs/assets/maya-renderglobals.jpg differ diff --git a/website/docs/assets/maya-rig_hierarchy_example.jpg b/website/docs/assets/maya-rig_hierarchy_example.jpg new file mode 100644 index 0000000000..5405005768 Binary files /dev/null and b/website/docs/assets/maya-rig_hierarchy_example.jpg differ diff --git a/website/docs/assets/maya-rig_model_setup.jpg b/website/docs/assets/maya-rig_model_setup.jpg new file mode 100644 index 0000000000..55450e225a Binary files /dev/null and b/website/docs/assets/maya-rig_model_setup.jpg differ diff --git a/website/docs/assets/maya-shortcut_buttons.jpg b/website/docs/assets/maya-shortcut_buttons.jpg new file mode 100644 index 0000000000..ead3b17b99 Binary files /dev/null and b/website/docs/assets/maya-shortcut_buttons.jpg differ diff --git a/website/docs/assets/maya-yeti_basic_setup.jpg b/website/docs/assets/maya-yeti_basic_setup.jpg new file mode 100644 index 0000000000..bdb0408757 Binary files /dev/null and b/website/docs/assets/maya-yeti_basic_setup.jpg differ diff --git a/website/docs/assets/maya-yeti_basic_setup_outline.jpg b/website/docs/assets/maya-yeti_basic_setup_outline.jpg new file mode 100644 index 0000000000..9b6dc18af1 Binary files /dev/null and b/website/docs/assets/maya-yeti_basic_setup_outline.jpg differ diff --git a/website/docs/assets/maya-yeti_cache_attributes.jpg 
b/website/docs/assets/maya-yeti_cache_attributes.jpg new file mode 100644 index 0000000000..5d5048bf9d Binary files /dev/null and b/website/docs/assets/maya-yeti_cache_attributes.jpg differ diff --git a/website/docs/assets/maya-yeti_rig.jpg b/website/docs/assets/maya-yeti_rig.jpg new file mode 100644 index 0000000000..07b13db409 Binary files /dev/null and b/website/docs/assets/maya-yeti_rig.jpg differ diff --git a/website/docs/assets/nks_icons/1_add_handles_end.png b/website/docs/assets/nks_icons/1_add_handles_end.png new file mode 100644 index 0000000000..4561745d66 Binary files /dev/null and b/website/docs/assets/nks_icons/1_add_handles_end.png differ diff --git a/website/docs/assets/nks_icons/2_add_handles.png b/website/docs/assets/nks_icons/2_add_handles.png new file mode 100644 index 0000000000..bb4c1802aa Binary files /dev/null and b/website/docs/assets/nks_icons/2_add_handles.png differ diff --git a/website/docs/assets/nks_icons/3_add_handles_start.png b/website/docs/assets/nks_icons/3_add_handles_start.png new file mode 100644 index 0000000000..c98e4f74f1 Binary files /dev/null and b/website/docs/assets/nks_icons/3_add_handles_start.png differ diff --git a/website/docs/assets/nks_icons/frame_start.png b/website/docs/assets/nks_icons/frame_start.png new file mode 100644 index 0000000000..af68df4722 Binary files /dev/null and b/website/docs/assets/nks_icons/frame_start.png differ diff --git a/website/docs/assets/nks_icons/hierarchy.png b/website/docs/assets/nks_icons/hierarchy.png new file mode 100644 index 0000000000..6acf39ced5 Binary files /dev/null and b/website/docs/assets/nks_icons/hierarchy.png differ diff --git a/website/docs/assets/nks_icons/lense.png b/website/docs/assets/nks_icons/lense.png new file mode 100644 index 0000000000..255b1753ed Binary files /dev/null and b/website/docs/assets/nks_icons/lense.png differ diff --git a/website/docs/assets/nks_icons/lense1.png b/website/docs/assets/nks_icons/lense1.png new file mode 100644 index 0000000000..1ad1264807 Binary files /dev/null and b/website/docs/assets/nks_icons/lense1.png differ diff --git a/website/docs/assets/nks_icons/reference.png b/website/docs/assets/nks_icons/reference.png new file mode 100644 index 0000000000..7517bd4105 Binary files /dev/null and b/website/docs/assets/nks_icons/reference.png differ diff --git a/website/docs/assets/nks_icons/resolution.png b/website/docs/assets/nks_icons/resolution.png new file mode 100644 index 0000000000..83803fc36d Binary files /dev/null and b/website/docs/assets/nks_icons/resolution.png differ diff --git a/website/docs/assets/nks_icons/retiming.png b/website/docs/assets/nks_icons/retiming.png new file mode 100644 index 0000000000..1c6f22e02c Binary files /dev/null and b/website/docs/assets/nks_icons/retiming.png differ diff --git a/website/docs/assets/nks_icons/review.png b/website/docs/assets/nks_icons/review.png new file mode 100644 index 0000000000..0d894b6987 Binary files /dev/null and b/website/docs/assets/nks_icons/review.png differ diff --git a/website/docs/assets/nks_icons/z_layer_bg.png b/website/docs/assets/nks_icons/z_layer_bg.png new file mode 100644 index 0000000000..51742b5df2 Binary files /dev/null and b/website/docs/assets/nks_icons/z_layer_bg.png differ diff --git a/website/docs/assets/nks_icons/z_layer_fg.png b/website/docs/assets/nks_icons/z_layer_fg.png new file mode 100644 index 0000000000..01e5f4f816 Binary files /dev/null and b/website/docs/assets/nks_icons/z_layer_fg.png differ diff --git a/website/docs/assets/nks_icons/z_layer_main.png 
b/website/docs/assets/nks_icons/z_layer_main.png new file mode 100644 index 0000000000..0ffb939a7f Binary files /dev/null and b/website/docs/assets/nks_icons/z_layer_main.png differ diff --git a/website/docs/assets/nuke_Create_errorVersionCheck.png b/website/docs/assets/nuke_Create_errorVersionCheck.png new file mode 100644 index 0000000000..85bcc26108 Binary files /dev/null and b/website/docs/assets/nuke_Create_errorVersionCheck.png differ diff --git a/website/docs/assets/nuke_Create_prerenderCustomNode.png b/website/docs/assets/nuke_Create_prerenderCustomNode.png new file mode 100644 index 0000000000..e44985ac1c Binary files /dev/null and b/website/docs/assets/nuke_Create_prerenderCustomNode.png differ diff --git a/website/docs/assets/nuke_Create_renderNodeDifferencesColor.png b/website/docs/assets/nuke_Create_renderNodeDifferencesColor.png new file mode 100644 index 0000000000..410455b6cb Binary files /dev/null and b/website/docs/assets/nuke_Create_renderNodeDifferencesColor.png differ diff --git a/website/docs/assets/nuke_Create_renderNodeDifferencesProperities.png b/website/docs/assets/nuke_Create_renderNodeDifferencesProperities.png new file mode 100644 index 0000000000..a37867b1b7 Binary files /dev/null and b/website/docs/assets/nuke_Create_renderNodeDifferencesProperities.png differ diff --git a/website/docs/assets/nuke_Create_renderNodeProperity.png b/website/docs/assets/nuke_Create_renderNodeProperity.png new file mode 100644 index 0000000000..2795b91ac8 Binary files /dev/null and b/website/docs/assets/nuke_Create_renderNodeProperity.png differ diff --git a/website/docs/assets/nuke_Create_renderNodeProperitySettings.png b/website/docs/assets/nuke_Create_renderNodeProperitySettings.png new file mode 100644 index 0000000000..ee98885216 Binary files /dev/null and b/website/docs/assets/nuke_Create_renderNodeProperitySettings.png differ diff --git a/website/docs/assets/nuke_Create_writeCustomPreRender.png b/website/docs/assets/nuke_Create_writeCustomPreRender.png new file mode 100644 index 0000000000..2345351f98 Binary files /dev/null and b/website/docs/assets/nuke_Create_writeCustomPreRender.png differ diff --git a/website/docs/assets/nuke_Create_writeMainPreRender.png b/website/docs/assets/nuke_Create_writeMainPreRender.png new file mode 100644 index 0000000000..e966444efe Binary files /dev/null and b/website/docs/assets/nuke_Create_writeMainPreRender.png differ diff --git a/website/docs/assets/nuke_Create_writeMainRender.png b/website/docs/assets/nuke_Create_writeMainRender.png new file mode 100644 index 0000000000..8827e5630b Binary files /dev/null and b/website/docs/assets/nuke_Create_writeMainRender.png differ diff --git a/website/docs/assets/nuke_Load_ReadProperities.png b/website/docs/assets/nuke_Load_ReadProperities.png new file mode 100644 index 0000000000..239ee89f25 Binary files /dev/null and b/website/docs/assets/nuke_Load_ReadProperities.png differ diff --git a/website/docs/assets/nuke_Load_ReadProperities1.png b/website/docs/assets/nuke_Load_ReadProperities1.png new file mode 100644 index 0000000000..76da12ef63 Binary files /dev/null and b/website/docs/assets/nuke_Load_ReadProperities1.png differ diff --git a/website/docs/assets/nuke_Load_subsetBrowser.png b/website/docs/assets/nuke_Load_subsetBrowser.png new file mode 100644 index 0000000000..5bfb5bd3eb Binary files /dev/null and b/website/docs/assets/nuke_Load_subsetBrowser.png differ diff --git a/website/docs/assets/nuke_Load_subsetBrowserSubmenu.png b/website/docs/assets/nuke_Load_subsetBrowserSubmenu.png new file 
mode 100644 index 0000000000..1a157008ee Binary files /dev/null and b/website/docs/assets/nuke_Load_subsetBrowserSubmenu.png differ diff --git a/website/docs/assets/nuke_ManageInventory_read.png b/website/docs/assets/nuke_ManageInventory_read.png new file mode 100644 index 0000000000..8234f27379 Binary files /dev/null and b/website/docs/assets/nuke_ManageInventory_read.png differ diff --git a/website/docs/assets/nuke_ManageInventory_read_latest.png b/website/docs/assets/nuke_ManageInventory_read_latest.png new file mode 100644 index 0000000000..db094c4a26 Binary files /dev/null and b/website/docs/assets/nuke_ManageInventory_read_latest.png differ diff --git a/website/docs/assets/nuke_ManageInventory_read_multipleInstances.png b/website/docs/assets/nuke_ManageInventory_read_multipleInstances.png new file mode 100644 index 0000000000..b5497c010e Binary files /dev/null and b/website/docs/assets/nuke_ManageInventory_read_multipleInstances.png differ diff --git a/website/docs/assets/nuke_ManageInventory_read_outdated.png b/website/docs/assets/nuke_ManageInventory_read_outdated.png new file mode 100644 index 0000000000..87531d1b4d Binary files /dev/null and b/website/docs/assets/nuke_ManageInventory_read_outdated.png differ diff --git a/website/docs/assets/nuke_Publish_prerender.png b/website/docs/assets/nuke_Publish_prerender.png new file mode 100644 index 0000000000..2ab3983db9 Binary files /dev/null and b/website/docs/assets/nuke_Publish_prerender.png differ diff --git a/website/docs/assets/nuke_Workfiles_saveAs.png b/website/docs/assets/nuke_Workfiles_saveAs.png new file mode 100644 index 0000000000..f9562628f2 Binary files /dev/null and b/website/docs/assets/nuke_Workfiles_saveAs.png differ diff --git a/website/docs/assets/nuke_Workfiles_selectLastFile.png b/website/docs/assets/nuke_Workfiles_selectLastFile.png new file mode 100644 index 0000000000..0f98cee0af Binary files /dev/null and b/website/docs/assets/nuke_Workfiles_selectLastFile.png differ diff --git a/website/docs/assets/nuke_applyAllSettings.png b/website/docs/assets/nuke_applyAllSettings.png new file mode 100644 index 0000000000..913140fed1 Binary files /dev/null and b/website/docs/assets/nuke_applyAllSettings.png differ diff --git a/website/docs/assets/nuke_autoBuild.png b/website/docs/assets/nuke_autoBuild.png new file mode 100644 index 0000000000..5effdd594d Binary files /dev/null and b/website/docs/assets/nuke_autoBuild.png differ diff --git a/website/docs/assets/nuke_buildFirstWorkfile.png b/website/docs/assets/nuke_buildFirstWorkfile.png new file mode 100644 index 0000000000..eab8f21a4a Binary files /dev/null and b/website/docs/assets/nuke_buildFirstWorkfile.png differ diff --git a/website/docs/assets/nuke_menu_ApplyAll.png b/website/docs/assets/nuke_menu_ApplyAll.png new file mode 100644 index 0000000000..c9e82b129e Binary files /dev/null and b/website/docs/assets/nuke_menu_ApplyAll.png differ diff --git a/website/docs/assets/nuke_menu_Create.png b/website/docs/assets/nuke_menu_Create.png new file mode 100644 index 0000000000..3823c7d870 Binary files /dev/null and b/website/docs/assets/nuke_menu_Create.png differ diff --git a/website/docs/assets/nuke_menu_Load.png b/website/docs/assets/nuke_menu_Load.png new file mode 100644 index 0000000000..c2ded15e53 Binary files /dev/null and b/website/docs/assets/nuke_menu_Load.png differ diff --git a/website/docs/assets/nuke_menu_Manage.png b/website/docs/assets/nuke_menu_Manage.png new file mode 100644 index 0000000000..71b7abd700 Binary files /dev/null and 
b/website/docs/assets/nuke_menu_Manage.png differ diff --git a/website/docs/assets/nuke_menu_Publish.png b/website/docs/assets/nuke_menu_Publish.png new file mode 100644 index 0000000000..38be1698b7 Binary files /dev/null and b/website/docs/assets/nuke_menu_Publish.png differ diff --git a/website/docs/assets/nuke_menu_SetContext.png b/website/docs/assets/nuke_menu_SetContext.png new file mode 100644 index 0000000000..83a31f7c7e Binary files /dev/null and b/website/docs/assets/nuke_menu_SetContext.png differ diff --git a/website/docs/assets/nuke_menu_Workfiles.png b/website/docs/assets/nuke_menu_Workfiles.png new file mode 100644 index 0000000000..030ce1b583 Binary files /dev/null and b/website/docs/assets/nuke_menu_Workfiles.png differ diff --git a/website/docs/assets/nuke_publish_renderMain.png b/website/docs/assets/nuke_publish_renderMain.png new file mode 100644 index 0000000000..bb499fb613 Binary files /dev/null and b/website/docs/assets/nuke_publish_renderMain.png differ diff --git a/website/docs/assets/nuke_setColorspace.png b/website/docs/assets/nuke_setColorspace.png new file mode 100644 index 0000000000..8895c30594 Binary files /dev/null and b/website/docs/assets/nuke_setColorspace.png differ diff --git a/website/docs/assets/nuke_setContext.png b/website/docs/assets/nuke_setContext.png new file mode 100644 index 0000000000..b9a6c79f29 Binary files /dev/null and b/website/docs/assets/nuke_setContext.png differ diff --git a/website/docs/assets/nuke_setFrameRanges.png b/website/docs/assets/nuke_setFrameRanges.png new file mode 100644 index 0000000000..0289fc4dba Binary files /dev/null and b/website/docs/assets/nuke_setFrameRanges.png differ diff --git a/website/docs/assets/nuke_setFrameRanges_timeline.png b/website/docs/assets/nuke_setFrameRanges_timeline.png new file mode 100644 index 0000000000..8e6b65c7ca Binary files /dev/null and b/website/docs/assets/nuke_setFrameRanges_timeline.png differ diff --git a/website/docs/assets/nuke_setResolution.png b/website/docs/assets/nuke_setResolution.png new file mode 100644 index 0000000000..3e40619884 Binary files /dev/null and b/website/docs/assets/nuke_setResolution.png differ diff --git a/website/docs/assets/photoshop_creator.PNG b/website/docs/assets/photoshop_creator.PNG new file mode 100644 index 0000000000..e72fe2f447 Binary files /dev/null and b/website/docs/assets/photoshop_creator.PNG differ diff --git a/website/docs/assets/photoshop_extension.PNG b/website/docs/assets/photoshop_extension.PNG new file mode 100644 index 0000000000..ef7081443d Binary files /dev/null and b/website/docs/assets/photoshop_extension.PNG differ diff --git a/website/docs/assets/photoshop_loader.PNG b/website/docs/assets/photoshop_loader.PNG new file mode 100644 index 0000000000..36eeadf084 Binary files /dev/null and b/website/docs/assets/photoshop_loader.PNG differ diff --git a/website/docs/assets/photoshop_loader_load.gif b/website/docs/assets/photoshop_loader_load.gif new file mode 100644 index 0000000000..efe051857b Binary files /dev/null and b/website/docs/assets/photoshop_loader_load.gif differ diff --git a/website/docs/assets/photoshop_manage.PNG b/website/docs/assets/photoshop_manage.PNG new file mode 100644 index 0000000000..64c424d94f Binary files /dev/null and b/website/docs/assets/photoshop_manage.PNG differ diff --git a/website/docs/assets/photoshop_manage_switch.gif b/website/docs/assets/photoshop_manage_switch.gif new file mode 100644 index 0000000000..de11b77e5c Binary files /dev/null and b/website/docs/assets/photoshop_manage_switch.gif 
differ diff --git a/website/docs/assets/photoshop_manage_update.gif b/website/docs/assets/photoshop_manage_update.gif new file mode 100644 index 0000000000..2c5d681cd3 Binary files /dev/null and b/website/docs/assets/photoshop_manage_update.gif differ diff --git a/website/docs/assets/photoshop_publish.PNG b/website/docs/assets/photoshop_publish.PNG new file mode 100644 index 0000000000..dc57757122 Binary files /dev/null and b/website/docs/assets/photoshop_publish.PNG differ diff --git a/website/docs/assets/photoshop_publish_actions.PNG b/website/docs/assets/photoshop_publish_actions.PNG new file mode 100644 index 0000000000..86083ad54b Binary files /dev/null and b/website/docs/assets/photoshop_publish_actions.PNG differ diff --git a/website/docs/assets/photoshop_publish_expand.PNG b/website/docs/assets/photoshop_publish_expand.PNG new file mode 100644 index 0000000000..6969b15647 Binary files /dev/null and b/website/docs/assets/photoshop_publish_expand.PNG differ diff --git a/website/docs/assets/photoshop_publish_inspect.PNG b/website/docs/assets/photoshop_publish_inspect.PNG new file mode 100644 index 0000000000..d2fd8922af Binary files /dev/null and b/website/docs/assets/photoshop_publish_inspect.PNG differ diff --git a/website/docs/assets/photoshop_publish_repair.gif b/website/docs/assets/photoshop_publish_repair.gif new file mode 100644 index 0000000000..bf7065801e Binary files /dev/null and b/website/docs/assets/photoshop_publish_repair.gif differ diff --git a/website/docs/assets/presets_plugins_extract_burnin_01.png b/website/docs/assets/presets_plugins_extract_burnin_01.png new file mode 100644 index 0000000000..03169c1c88 Binary files /dev/null and b/website/docs/assets/presets_plugins_extract_burnin_01.png differ diff --git a/website/docs/assets/settings/Screenshot_1.png b/website/docs/assets/settings/Screenshot_1.png new file mode 100644 index 0000000000..12c34e8f05 Binary files /dev/null and b/website/docs/assets/settings/Screenshot_1.png differ diff --git a/website/docs/assets/settings/anatomy_01.png b/website/docs/assets/settings/anatomy_01.png new file mode 100644 index 0000000000..92b7e880d6 Binary files /dev/null and b/website/docs/assets/settings/anatomy_01.png differ diff --git a/website/docs/assets/settings/anatomy_roots01.png b/website/docs/assets/settings/anatomy_roots01.png new file mode 100644 index 0000000000..56cda7345b Binary files /dev/null and b/website/docs/assets/settings/anatomy_roots01.png differ diff --git a/website/docs/assets/settings/anatomy_roots02.png b/website/docs/assets/settings/anatomy_roots02.png new file mode 100644 index 0000000000..e55793118f Binary files /dev/null and b/website/docs/assets/settings/anatomy_roots02.png differ diff --git a/website/docs/assets/settings/anatomy_roots03.png b/website/docs/assets/settings/anatomy_roots03.png new file mode 100644 index 0000000000..9c47a189fe Binary files /dev/null and b/website/docs/assets/settings/anatomy_roots03.png differ diff --git a/website/docs/assets/settings/applications_01.png b/website/docs/assets/settings/applications_01.png new file mode 100644 index 0000000000..52c31f6649 Binary files /dev/null and b/website/docs/assets/settings/applications_01.png differ diff --git a/website/docs/assets/settings/colours_01.png b/website/docs/assets/settings/colours_01.png new file mode 100644 index 0000000000..927c6e0a2b Binary files /dev/null and b/website/docs/assets/settings/colours_01.png differ diff --git a/website/docs/assets/settings/colours_02.png b/website/docs/assets/settings/colours_02.png 
new file mode 100644 index 0000000000..b304fa35f3 Binary files /dev/null and b/website/docs/assets/settings/colours_02.png differ diff --git a/website/docs/assets/settings/override_breadcrumbs.png b/website/docs/assets/settings/override_breadcrumbs.png new file mode 100644 index 0000000000..03eea756ef Binary files /dev/null and b/website/docs/assets/settings/override_breadcrumbs.png differ diff --git a/website/docs/assets/settings/studio_defaults.gif b/website/docs/assets/settings/studio_defaults.gif new file mode 100644 index 0000000000..4825e60501 Binary files /dev/null and b/website/docs/assets/settings/studio_defaults.gif differ diff --git a/website/docs/assets/settings/studio_defaults_remove.gif b/website/docs/assets/settings/studio_defaults_remove.gif new file mode 100644 index 0000000000..cedd9803d4 Binary files /dev/null and b/website/docs/assets/settings/studio_defaults_remove.gif differ diff --git a/website/docs/assets/settings/tools_01.png b/website/docs/assets/settings/tools_01.png new file mode 100644 index 0000000000..9837309994 Binary files /dev/null and b/website/docs/assets/settings/tools_01.png differ diff --git a/website/docs/assets/tools/tools_library_1-small.png b/website/docs/assets/tools/tools_library_1-small.png new file mode 100644 index 0000000000..557b432950 Binary files /dev/null and b/website/docs/assets/tools/tools_library_1-small.png differ diff --git a/website/docs/assets/tools/tools_library_2-small.png b/website/docs/assets/tools/tools_library_2-small.png new file mode 100644 index 0000000000..cf1c6728b0 Binary files /dev/null and b/website/docs/assets/tools/tools_library_2-small.png differ diff --git a/website/docs/assets/tools/tools_loader_1.png b/website/docs/assets/tools/tools_loader_1.png new file mode 100644 index 0000000000..5ec062884d Binary files /dev/null and b/website/docs/assets/tools/tools_loader_1.png differ diff --git a/website/docs/assets/tools/tools_loader_20.png b/website/docs/assets/tools/tools_loader_20.png new file mode 100644 index 0000000000..7bf032d1f9 Binary files /dev/null and b/website/docs/assets/tools/tools_loader_20.png differ diff --git a/website/docs/assets/tools/tools_loader_21.png b/website/docs/assets/tools/tools_loader_21.png new file mode 100644 index 0000000000..b27b2e65b8 Binary files /dev/null and b/website/docs/assets/tools/tools_loader_21.png differ diff --git a/website/docs/assets/tools/tools_loader_22.png b/website/docs/assets/tools/tools_loader_22.png new file mode 100644 index 0000000000..d855568bd8 Binary files /dev/null and b/website/docs/assets/tools/tools_loader_22.png differ diff --git a/website/docs/assets/tools/tools_loader_30-small.png b/website/docs/assets/tools/tools_loader_30-small.png new file mode 100644 index 0000000000..b71add78f1 Binary files /dev/null and b/website/docs/assets/tools/tools_loader_30-small.png differ diff --git a/website/docs/assets/tools/tools_loader_30.png b/website/docs/assets/tools/tools_loader_30.png new file mode 100644 index 0000000000..56f9f89061 Binary files /dev/null and b/website/docs/assets/tools/tools_loader_30.png differ diff --git a/website/docs/assets/tools/tools_loader_4-small.png b/website/docs/assets/tools/tools_loader_4-small.png new file mode 100644 index 0000000000..933da125e4 Binary files /dev/null and b/website/docs/assets/tools/tools_loader_4-small.png differ diff --git a/website/docs/assets/tools/tools_loader_4.png b/website/docs/assets/tools/tools_loader_4.png new file mode 100644 index 0000000000..47f221c4cc Binary files /dev/null and 
b/website/docs/assets/tools/tools_loader_4.png differ diff --git a/website/docs/assets/tools/tools_loader_40-small.png b/website/docs/assets/tools/tools_loader_40-small.png new file mode 100644 index 0000000000..a352dbf78a Binary files /dev/null and b/website/docs/assets/tools/tools_loader_40-small.png differ diff --git a/website/docs/assets/tools/tools_loader_40.png b/website/docs/assets/tools/tools_loader_40.png new file mode 100644 index 0000000000..b3ad681ce4 Binary files /dev/null and b/website/docs/assets/tools/tools_loader_40.png differ diff --git a/website/docs/assets/tools/tools_loader_41-small.png b/website/docs/assets/tools/tools_loader_41-small.png new file mode 100644 index 0000000000..8f8a27c649 Binary files /dev/null and b/website/docs/assets/tools/tools_loader_41-small.png differ diff --git a/website/docs/assets/tools/tools_loader_41.png b/website/docs/assets/tools/tools_loader_41.png new file mode 100644 index 0000000000..332634fffc Binary files /dev/null and b/website/docs/assets/tools/tools_loader_41.png differ diff --git a/website/docs/assets/tools/tools_loader_5-small.png b/website/docs/assets/tools/tools_loader_5-small.png new file mode 100644 index 0000000000..f2c5d53de1 Binary files /dev/null and b/website/docs/assets/tools/tools_loader_5-small.png differ diff --git a/website/docs/assets/tools/tools_loader_5.png b/website/docs/assets/tools/tools_loader_5.png new file mode 100644 index 0000000000..12eb38a806 Binary files /dev/null and b/website/docs/assets/tools/tools_loader_5.png differ diff --git a/website/docs/assets/tools/tools_loader_50.png b/website/docs/assets/tools/tools_loader_50.png new file mode 100644 index 0000000000..625adbc0a1 Binary files /dev/null and b/website/docs/assets/tools/tools_loader_50.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_10-small.png b/website/docs/assets/tools/tools_scene_inventory_10-small.png new file mode 100644 index 0000000000..52830b21e4 Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_10-small.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_20.png b/website/docs/assets/tools/tools_scene_inventory_20.png new file mode 100644 index 0000000000..ec92c4bd69 Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_20.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_30.png b/website/docs/assets/tools/tools_scene_inventory_30.png new file mode 100644 index 0000000000..27c1b05ffa Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_30.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_40.png b/website/docs/assets/tools/tools_scene_inventory_40.png new file mode 100644 index 0000000000..c835a0bc42 Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_40.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_50.png b/website/docs/assets/tools/tools_scene_inventory_50.png new file mode 100644 index 0000000000..282d5267f4 Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_50.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_54.png b/website/docs/assets/tools/tools_scene_inventory_54.png new file mode 100644 index 0000000000..bf9a034efd Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_54.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_55.png b/website/docs/assets/tools/tools_scene_inventory_55.png new file mode 100644 index 
0000000000..d99dd6eace Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_55.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_60-small.png b/website/docs/assets/tools/tools_scene_inventory_60-small.png new file mode 100644 index 0000000000..a2ae8f138b Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_60-small.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_60.png b/website/docs/assets/tools/tools_scene_inventory_60.png new file mode 100644 index 0000000000..6dfd495ed9 Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_60.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_61-small.png b/website/docs/assets/tools/tools_scene_inventory_61-small.png new file mode 100644 index 0000000000..6edb1da148 Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_61-small.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_61.png b/website/docs/assets/tools/tools_scene_inventory_61.png new file mode 100644 index 0000000000..b270db3fca Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_61.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_62-small.png b/website/docs/assets/tools/tools_scene_inventory_62-small.png new file mode 100644 index 0000000000..8623a7c40e Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_62-small.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_62.png b/website/docs/assets/tools/tools_scene_inventory_62.png new file mode 100644 index 0000000000..122c3a9e25 Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_62.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_63-small.png b/website/docs/assets/tools/tools_scene_inventory_63-small.png new file mode 100644 index 0000000000..58c8bb4fae Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_63-small.png differ diff --git a/website/docs/assets/tools/tools_scene_inventory_63.png b/website/docs/assets/tools/tools_scene_inventory_63.png new file mode 100644 index 0000000000..5be8c8f70e Binary files /dev/null and b/website/docs/assets/tools/tools_scene_inventory_63.png differ diff --git a/website/docs/assets/tools_context_manager.png b/website/docs/assets/tools_context_manager.png new file mode 100644 index 0000000000..badf7b00bf Binary files /dev/null and b/website/docs/assets/tools_context_manager.png differ diff --git a/website/docs/assets/tools_subset_manager.png b/website/docs/assets/tools_subset_manager.png new file mode 100644 index 0000000000..6a1e48dbd5 Binary files /dev/null and b/website/docs/assets/tools_subset_manager.png differ diff --git a/website/docs/assets/unreal-avalon_tools.jpg b/website/docs/assets/unreal-avalon_tools.jpg new file mode 100644 index 0000000000..531fbe516a Binary files /dev/null and b/website/docs/assets/unreal-avalon_tools.jpg differ diff --git a/website/docs/assets/unreal-container.jpg b/website/docs/assets/unreal-container.jpg new file mode 100644 index 0000000000..f0c0a61e95 Binary files /dev/null and b/website/docs/assets/unreal-container.jpg differ diff --git a/website/docs/assets/workfiles_1.png b/website/docs/assets/workfiles_1.png new file mode 100644 index 0000000000..bcc74fb4b8 Binary files /dev/null and b/website/docs/assets/workfiles_1.png differ diff --git a/website/docs/assets/workfiles_2.png b/website/docs/assets/workfiles_2.png new file 
mode 100644 index 0000000000..1baf455587 Binary files /dev/null and b/website/docs/assets/workfiles_2.png differ diff --git a/website/docs/changelog.md b/website/docs/changelog.md new file mode 100644 index 0000000000..bec4a02173 --- /dev/null +++ b/website/docs/changelog.md @@ -0,0 +1,1043 @@ +--- +id: changelog +title: Changelog +sidebar_label: Changelog +--- + +## [2.16.0](https://github.com/pypeclub/pype/tree/2.16.0) + + _**release date:** 2021-03-22_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.3...2.16.0) + +**Enhancements:** + +- Nuke: deadline submit limit group filter [\#1167](https://github.com/pypeclub/pype/pull/1167) +- Maya: support for Deadline Group and Limit Groups - backport 2.x [\#1156](https://github.com/pypeclub/pype/pull/1156) +- Maya: fixes for Redshift support [\#1152](https://github.com/pypeclub/pype/pull/1152) +- Nuke: adding preset for a Read node name to all img and mov Loaders [\#1146](https://github.com/pypeclub/pype/pull/1146) +- nuke deadline submit with environ var from presets overrides [\#1142](https://github.com/pypeclub/pype/pull/1142) +- Change timers after task change [\#1138](https://github.com/pypeclub/pype/pull/1138) +- Nuke: shortcuts for Pype menu [\#1127](https://github.com/pypeclub/pype/pull/1127) +- Nuke: workfile template [\#1124](https://github.com/pypeclub/pype/pull/1124) +- Sites local settings by site name [\#1117](https://github.com/pypeclub/pype/pull/1117) +- Reset loader's asset selection on context change [\#1106](https://github.com/pypeclub/pype/pull/1106) +- Bulk mov render publishing [\#1101](https://github.com/pypeclub/pype/pull/1101) +- Photoshop: mark publishable instances [\#1093](https://github.com/pypeclub/pype/pull/1093) +- Added ability to define BG color for extract review [\#1088](https://github.com/pypeclub/pype/pull/1088) +- TVPaint extractor enhancement [\#1080](https://github.com/pypeclub/pype/pull/1080) +- Photoshop: added support for .psb in workfiles [\#1078](https://github.com/pypeclub/pype/pull/1078) +- Optionally add task to subset name [\#1072](https://github.com/pypeclub/pype/pull/1072) +- Only extend clip range when collecting. [\#1008](https://github.com/pypeclub/pype/pull/1008) +- Collect audio for farm reviews. [\#1073](https://github.com/pypeclub/pype/pull/1073) + + +**Fixed bugs:** + +- Fix path spaces in jpeg extractor [\#1174](https://github.com/pypeclub/pype/pull/1174) +- Maya: Bugfix: superclass for CreateCameraRig [\#1166](https://github.com/pypeclub/pype/pull/1166) +- Maya: Submit to Deadline - fix typo in condition [\#1163](https://github.com/pypeclub/pype/pull/1163) +- Avoid dot in repre extension [\#1125](https://github.com/pypeclub/pype/pull/1125) +- Fix versions variable usage in standalone publisher [\#1090](https://github.com/pypeclub/pype/pull/1090) +- Collect instance data fix subset query [\#1082](https://github.com/pypeclub/pype/pull/1082) +- Fix getting the camera name. [\#1067](https://github.com/pypeclub/pype/pull/1067) +- Nuke: Ensure "NUKE\_TEMP\_DIR" is not part of the Deadline job environment. 
[\#1064](https://github.com/pypeclub/pype/pull/1064) + +### [2.15.3](https://github.com/pypeclub/pype/tree/2.15.3) + + _**release date:** 2021-02-26_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.2...2.15.3) + +**Enhancements:** + +- Maya: speedup renderable camera collection [\#1053](https://github.com/pypeclub/pype/pull/1053) +- Harmony - add regex search to filter allowed task names for collectin… [\#1047](https://github.com/pypeclub/pype/pull/1047) + +**Fixed bugs:** + +- Ftrack integrate hierarchy fix [\#1085](https://github.com/pypeclub/pype/pull/1085) +- Explicit subset filter in anatomy instance data [\#1059](https://github.com/pypeclub/pype/pull/1059) +- TVPaint frame offset [\#1057](https://github.com/pypeclub/pype/pull/1057) +- Auto fix unicode strings [\#1046](https://github.com/pypeclub/pype/pull/1046) + +### [2.15.2](https://github.com/pypeclub/pype/tree/2.15.2) + + _**release date:** 2021-02-19_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.1...2.15.2) + +**Enhancements:** + +- Maya: Vray scene publishing [\#1013](https://github.com/pypeclub/pype/pull/1013) + +**Fixed bugs:** + +- Fix entity move under project [\#1040](https://github.com/pypeclub/pype/pull/1040) +- smaller nuke fixes from production [\#1036](https://github.com/pypeclub/pype/pull/1036) +- TVPaint thumbnail extract fix [\#1031](https://github.com/pypeclub/pype/pull/1031) + +### [2.15.1](https://github.com/pypeclub/pype/tree/2.15.1) + + _**release date:** 2021-02-12_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.15.0...2.15.1) + +**Enhancements:** + +- Delete version as loader action [\#1011](https://github.com/pypeclub/pype/pull/1011) +- Delete old versions [\#445](https://github.com/pypeclub/pype/pull/445) + +**Fixed bugs:** + +- PS - remove obsolete functions from pywin32 [\#1006](https://github.com/pypeclub/pype/pull/1006) +- Clone description of review session objects. [\#922](https://github.com/pypeclub/pype/pull/922) + +## [2.15.0](https://github.com/pypeclub/pype/tree/2.15.0) + + _**release date:** 2021-02-09_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.6...2.15.0) + +**Enhancements:** + +- Resolve - loading and updating clips [\#932](https://github.com/pypeclub/pype/pull/932) +- Release/2.15.0 [\#926](https://github.com/pypeclub/pype/pull/926) +- Photoshop: add option for template.psd and prelaunch hook [\#894](https://github.com/pypeclub/pype/pull/894) +- Nuke: deadline presets [\#993](https://github.com/pypeclub/pype/pull/993) +- Maya: Alembic only set attributes that exists. [\#986](https://github.com/pypeclub/pype/pull/986) +- Harmony: render local and handle fixes [\#981](https://github.com/pypeclub/pype/pull/981) +- PSD Bulk export of ANIM group [\#965](https://github.com/pypeclub/pype/pull/965) +- AE - added prelaunch hook for opening last or workfile from template [\#944](https://github.com/pypeclub/pype/pull/944) +- PS - safer handling of loading of workfile [\#941](https://github.com/pypeclub/pype/pull/941) +- Maya: Handling Arnold referenced AOVs [\#938](https://github.com/pypeclub/pype/pull/938) +- TVPaint: switch layer IDs for layer names during identification [\#903](https://github.com/pypeclub/pype/pull/903) +- TVPaint audio/sound loader [\#893](https://github.com/pypeclub/pype/pull/893) +- Clone review session with children. 
[\#891](https://github.com/pypeclub/pype/pull/891) +- Simple compositing data packager for freelancers [\#884](https://github.com/pypeclub/pype/pull/884) +- Harmony deadline submission [\#881](https://github.com/pypeclub/pype/pull/881) +- Maya: Optionally hide image planes from reviews. [\#840](https://github.com/pypeclub/pype/pull/840) +- Maya: handle referenced AOVs for Vray [\#824](https://github.com/pypeclub/pype/pull/824) +- DWAA/DWAB support on windows [\#795](https://github.com/pypeclub/pype/pull/795) +- Unreal: animation, layout and setdress updates [\#695](https://github.com/pypeclub/pype/pull/695) + +**Fixed bugs:** + +- Maya: Looks - disable hardlinks [\#995](https://github.com/pypeclub/pype/pull/995) +- Fix Ftrack custom attribute update [\#982](https://github.com/pypeclub/pype/pull/982) +- Prores ks in burnin script [\#960](https://github.com/pypeclub/pype/pull/960) +- terminal.py crash on import [\#839](https://github.com/pypeclub/pype/pull/839) +- Extract review handle bizarre pixel aspect ratio [\#990](https://github.com/pypeclub/pype/pull/990) +- Nuke: add nuke related env var to submission [\#988](https://github.com/pypeclub/pype/pull/988) +- Nuke: missing preset's variable [\#984](https://github.com/pypeclub/pype/pull/984) +- Get creator by name fix [\#979](https://github.com/pypeclub/pype/pull/979) +- Fix update of project's tasks on Ftrack sync [\#972](https://github.com/pypeclub/pype/pull/972) +- nuke: wrong frame offset in mov loader [\#971](https://github.com/pypeclub/pype/pull/971) +- Create project structure action fix multiroot [\#967](https://github.com/pypeclub/pype/pull/967) +- PS: remove pywin installation from hook [\#964](https://github.com/pypeclub/pype/pull/964) +- Prores ks in burnin script [\#959](https://github.com/pypeclub/pype/pull/959) +- Subset family is now stored in subset document [\#956](https://github.com/pypeclub/pype/pull/956) +- DJV new version arguments [\#954](https://github.com/pypeclub/pype/pull/954) +- TV Paint: Fix single frame Sequence [\#953](https://github.com/pypeclub/pype/pull/953) +- nuke: missing `file` knob update [\#933](https://github.com/pypeclub/pype/pull/933) +- Photoshop: Create from single layer was failing [\#920](https://github.com/pypeclub/pype/pull/920) +- Nuke: baking mov with correct colorspace inherited from write [\#909](https://github.com/pypeclub/pype/pull/909) +- Launcher fix actions discover [\#896](https://github.com/pypeclub/pype/pull/896) +- Get the correct file path for the updated mov.
[\#889](https://github.com/pypeclub/pype/pull/889) +- Maya: Deadline submitter - shared data access violation [\#831](https://github.com/pypeclub/pype/pull/831) +- Maya: Take into account vray master AOV switch [\#822](https://github.com/pypeclub/pype/pull/822) + +**Merged pull requests:** + +- Refactor blender to 3.0 format [\#934](https://github.com/pypeclub/pype/pull/934) + +### [2.14.6](https://github.com/pypeclub/pype/tree/2.14.6) + + _**release date:** 2021-01-15_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.5...2.14.6) + +**Fixed bugs:** + +- Nuke: improving of hashing path [\#885](https://github.com/pypeclub/pype/pull/885) + +**Merged pull requests:** + +- Hiero: cut videos with correct seconds [\#892](https://github.com/pypeclub/pype/pull/892) +- Faster sync to avalon preparation [\#869](https://github.com/pypeclub/pype/pull/869) + +### [2.14.5](https://github.com/pypeclub/pype/tree/2.14.5) + + _**release date:** 2021-01-06_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.4...2.14.5) + +**Merged pull requests:** + +- Pype logger refactor [\#866](https://github.com/pypeclub/pype/pull/866) + +### [2.14.4](https://github.com/pypeclub/pype/tree/2.14.4) + + _**release date:** 2020-12-18_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.3...2.14.4) + +**Merged pull requests:** + +- Fix - AE - added explicit cast to int [\#837](https://github.com/pypeclub/pype/pull/837) + +### [2.14.3](https://github.com/pypeclub/pype/tree/2.14.3) + + _**release date:** 2020-12-16_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.2...2.14.3) + +**Fixed bugs:** + +- TVPaint repair invalid metadata [\#809](https://github.com/pypeclub/pype/pull/809) +- Feature/push hier value to nonhier action [\#807](https://github.com/pypeclub/pype/pull/807) +- Harmony: fix palette and image sequence loader [\#806](https://github.com/pypeclub/pype/pull/806) + +**Merged pull requests:** + +- respecting space in path [\#823](https://github.com/pypeclub/pype/pull/823) + +### [2.14.2](https://github.com/pypeclub/pype/tree/2.14.2) + + _**release date:** 2020-12-04_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.1...2.14.2) + +**Enhancements:** + +- Collapsible wrapper in settings [\#767](https://github.com/pypeclub/pype/pull/767) + +**Fixed bugs:** + +- Harmony: template extraction and palettes thumbnails on mac [\#768](https://github.com/pypeclub/pype/pull/768) +- TVPaint store context to workfile metadata \(764\) [\#766](https://github.com/pypeclub/pype/pull/766) +- Extract review audio cut fix [\#763](https://github.com/pypeclub/pype/pull/763) + +**Merged pull requests:** + +- AE: fix publish after background load [\#781](https://github.com/pypeclub/pype/pull/781) +- TVPaint store members key [\#769](https://github.com/pypeclub/pype/pull/769) + +### [2.14.1](https://github.com/pypeclub/pype/tree/2.14.1) + + _**release date:** 2020-11-27_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.14.0...2.14.1) + +**Enhancements:** + +- Settings required keys in modifiable dict [\#770](https://github.com/pypeclub/pype/pull/770) +- Extract review may not add audio to output [\#761](https://github.com/pypeclub/pype/pull/761) + +**Fixed bugs:** + +- After Effects: frame range, file format and render source scene fixes [\#760](https://github.com/pypeclub/pype/pull/760) +- Hiero: trimming review with clip event number [\#754](https://github.com/pypeclub/pype/pull/754) +- TVPaint: fix updating of loaded subsets
[\#752](https://github.com/pypeclub/pype/pull/752) +- Maya: Vray handling of default aov [\#748](https://github.com/pypeclub/pype/pull/748) +- Maya: multiple renderable cameras in layer didn't work [\#744](https://github.com/pypeclub/pype/pull/744) +- Ftrack integrate custom attributes fix [\#742](https://github.com/pypeclub/pype/pull/742) + + + +## [2.14.0](https://github.com/pypeclub/pype/tree/2.14.0) + + _**release date:** 2020-11-24_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.7...2.14.0) + +**Enhancements:** + +- Ftrack: Event for syncing shot or asset status with tasks. [\#736](https://github.com/pypeclub/pype/pull/736) +- Maya: add camera rig publishing option [\#721](https://github.com/pypeclub/pype/pull/721) +- Maya: Ask user to select non-default camera from scene or create a new one. [\#678](https://github.com/pypeclub/pype/pull/678) +- Maya: Camera name can be added to burnins. [\#674](https://github.com/pypeclub/pype/pull/674) +- Sort instances by label in pyblish gui [\#719](https://github.com/pypeclub/pype/pull/719) +- Synchronize ftrack hierarchical and shot attributes [\#716](https://github.com/pypeclub/pype/pull/716) +- Standalone Publisher: Publish editorial from separate image sequences [\#699](https://github.com/pypeclub/pype/pull/699) +- Render publish plugins abstraction [\#687](https://github.com/pypeclub/pype/pull/687) +- TV Paint: image loader with options [\#675](https://github.com/pypeclub/pype/pull/675) +- **TV Paint (Beta):** initial implementation of creators and local rendering [\#693](https://github.com/pypeclub/pype/pull/693) +- **After Effects (Beta):** base integration with loaders [\#667](https://github.com/pypeclub/pype/pull/667) +- Harmony: Javascript refactoring and overall stability improvements [\#666](https://github.com/pypeclub/pype/pull/666) + +**Fixed bugs:** + +- TVPaint: extract review fix [\#740](https://github.com/pypeclub/pype/pull/740) +- After Effects: Reviews were not being sent to ftrack [\#738](https://github.com/pypeclub/pype/pull/738) +- Maya: vray proxy was not loading [\#722](https://github.com/pypeclub/pype/pull/722) +- Maya: Vray expected file fixes [\#682](https://github.com/pypeclub/pype/pull/682) + +**Deprecated:** + +- Removed artist view from pyblish gui [\#717](https://github.com/pypeclub/pype/pull/717) +- Maya: disable legacy override check for cameras [\#715](https://github.com/pypeclub/pype/pull/715) + + + + +### [2.13.7](https://github.com/pypeclub/pype/tree/2.13.7) + + _**release date:** 2020-11-19_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.6...2.13.7) + +**Merged pull requests:** + +- fix\(SP\): getting fps from context instead of nonexistent entity [\#729](https://github.com/pypeclub/pype/pull/729) + + + + +### [2.13.6](https://github.com/pypeclub/pype/tree/2.13.6) + + _**release date:** 2020-11-15_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.5...2.13.6) + +**Fixed bugs:** + +- Maya workfile version wasn't syncing with renders properly [\#711](https://github.com/pypeclub/pype/pull/711) +- Maya: Fix for publishing multiple cameras with review from the same scene [\#710](https://github.com/pypeclub/pype/pull/710) + + + + +### [2.13.5](https://github.com/pypeclub/pype/tree/2.13.5) + + _**release date:** 2020-11-12_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.4...2.13.5) + + +**Fixed bugs:** + +- Wrong thumbnail file was picked when publishing sequence in standalone publisher [\#703](https://github.com/pypeclub/pype/pull/703) +- Fix:
Burnin data pass and FFmpeg tool check [\#701](https://github.com/pypeclub/pype/pull/701) + + + + +### [2.13.4](https://github.com/pypeclub/pype/tree/2.13.4) + + _**release date:** 2020-11-09_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.3...2.13.4) + + +**Fixed bugs:** + +- Photoshop unhiding hidden layers [\#688](https://github.com/pypeclub/pype/issues/688) +- Nuke: Favorite directories "shot dir" "project dir" - not working \#684 [\#685](https://github.com/pypeclub/pype/pull/685) + + + + + +### [2.13.3](https://github.com/pypeclub/pype/tree/2.13.3) + + _**release date:** 2020-11-03_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.2...2.13.3) + +**Fixed bugs:** + +- Fix ffmpeg executable path with spaces [\#680](https://github.com/pypeclub/pype/pull/680) +- Hotfix: Added default version number [\#679](https://github.com/pypeclub/pype/pull/679) + + + + +### [2.13.2](https://github.com/pypeclub/pype/tree/2.13.2) + + _**release date:** 2020-10-28_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.1...2.13.2) + +**Fixed bugs:** + +- Nuke: wrong conditions when fixing legacy write nodes [\#665](https://github.com/pypeclub/pype/pull/665) + + + + +### [2.13.1](https://github.com/pypeclub/pype/tree/2.13.1) + + _**release date:** 2020-10-23_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.13.0...2.13.1) + +**Fixed bugs:** + +- Photoshop: Layer name is not propagating to metadata [\#654](https://github.com/pypeclub/pype/issues/654) +- Photoshop: Loader fails with "can't set attribute" [\#650](https://github.com/pypeclub/pype/issues/650) +- Hiero: Review video file adding one frame to the end [\#659](https://github.com/pypeclub/pype/issues/659) + + + +## [2.13.0](https://github.com/pypeclub/pype/tree/2.13.0) + + _**release date:** 2020-10-16_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.5...2.13.0) + +**Enhancements:** + +- Deadline Output Folder [\#636](https://github.com/pypeclub/pype/issues/636) +- Nuke Camera Loader [\#565](https://github.com/pypeclub/pype/issues/565) +- Deadline publish job shows publishing output folder [\#649](https://github.com/pypeclub/pype/pull/649) +- Get latest version in lib [\#642](https://github.com/pypeclub/pype/pull/642) +- Improved publishing of multiple representations from SP [\#638](https://github.com/pypeclub/pype/pull/638) +- TvPaint: launch shot work file from within Ftrack [\#631](https://github.com/pypeclub/pype/pull/631) +- Add mp4 support for RV action.
[\#628](https://github.com/pypeclub/pype/pull/628) +- Maya: allow renders to have version synced with workfile [\#618](https://github.com/pypeclub/pype/pull/618) +- Renaming nukestudio host folder to hiero [\#617](https://github.com/pypeclub/pype/pull/617) +- Harmony: More efficient publishing [\#615](https://github.com/pypeclub/pype/pull/615) +- Ftrack server action improvement [\#608](https://github.com/pypeclub/pype/pull/608) +- Deadline user defaults to pype username if present [\#607](https://github.com/pypeclub/pype/pull/607) +- Standalone publisher now has icon [\#606](https://github.com/pypeclub/pype/pull/606) +- Nuke render write targeting knob improvement [\#603](https://github.com/pypeclub/pype/pull/603) +- Animated pyblish gui [\#602](https://github.com/pypeclub/pype/pull/602) +- Maya: Deadline - make use of asset dependencies optional [\#591](https://github.com/pypeclub/pype/pull/591) +- Nuke: Publishing, loading and updating alembic cameras [\#575](https://github.com/pypeclub/pype/pull/575) +- Maya: add look assigner to pype menu even if scriptsmenu is not available [\#573](https://github.com/pypeclub/pype/pull/573) +- Store task types in the database [\#572](https://github.com/pypeclub/pype/pull/572) +- Maya: Tiled EXRs to scanline EXRs render option [\#512](https://github.com/pypeclub/pype/pull/512) +- Fusion: basic integration refresh [\#452](https://github.com/pypeclub/pype/pull/452) + +**Fixed bugs:** + +- Burnin script did not propagate ffmpeg output [\#640](https://github.com/pypeclub/pype/issues/640) +- Pyblish-pype spacer in terminal wasn't transparent [\#646](https://github.com/pypeclub/pype/pull/646) +- Lib subprocess without logger [\#645](https://github.com/pypeclub/pype/pull/645) +- Nuke: prevent crash if we only have single frame in sequence [\#644](https://github.com/pypeclub/pype/pull/644) +- Burnin script logs better output [\#641](https://github.com/pypeclub/pype/pull/641) +- Missing audio on farm submission. [\#639](https://github.com/pypeclub/pype/pull/639) +- review from imagesequence error [\#633](https://github.com/pypeclub/pype/pull/633) +- Hiero: wrong order of fps clip instance data collecting [\#627](https://github.com/pypeclub/pype/pull/627) +- Add source for review instances. 
[\#625](https://github.com/pypeclub/pype/pull/625) +- Task processing in event sync [\#623](https://github.com/pypeclub/pype/pull/623) +- sync to avalon doesn't remove renamed task [\#619](https://github.com/pypeclub/pype/pull/619) +- Intent publish setting wasn't working with default value [\#562](https://github.com/pypeclub/pype/pull/562) +- Maya: Updating a look where the shader name changed, leaves the geo without a shader [\#514](https://github.com/pypeclub/pype/pull/514) + + +### [2.12.5](https://github.com/pypeclub/pype/tree/2.12.5) + +_**release date:** 2020-10-14_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.4...2.12.5) + +**Fixed bugs:** + +- Harmony: Disable application launch logic [\#637](https://github.com/pypeclub/pype/pull/637) + +### [2.12.4](https://github.com/pypeclub/pype/tree/2.12.4) + +_**release date:** 2020-10-08_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.3...2.12.4) + +**Fixed bugs:** + +- Sync to avalon doesn't remove renamed task [\#605](https://github.com/pypeclub/pype/issues/605) + + +**Merged pull requests:** + +- NukeStudio: small fixes [\#622](https://github.com/pypeclub/pype/pull/622) +- NukeStudio: broken order of plugins [\#620](https://github.com/pypeclub/pype/pull/620) + +### [2.12.3](https://github.com/pypeclub/pype/tree/2.12.3) + +_**release date:** 2020-10-06_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.2...2.12.3) + +**Fixed bugs:** + +- Harmony: empty scene contamination [\#583](https://github.com/pypeclub/pype/issues/583) +- Edit publishing in SP doesn't respect shot selection for publishing [\#542](https://github.com/pypeclub/pype/issues/542) +- Pathlib breaks compatibility with python2 hosts [\#281](https://github.com/pypeclub/pype/issues/281) +- Maya: fix maya scene type preset exception [\#569](https://github.com/pypeclub/pype/pull/569) +- Standalone publisher editorial plugins interfering [\#580](https://github.com/pypeclub/pype/pull/580) + +### [2.12.2](https://github.com/pypeclub/pype/tree/2.12.2) + +_**release date:** 2020-09-25_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.1...2.12.2) + +**Fixed bugs:** + +- Harmony: Saving heavy scenes will crash [\#507](https://github.com/pypeclub/pype/issues/507) +- Extract review a representation name with `\*\_burnin` [\#388](https://github.com/pypeclub/pype/issues/388) +- Hierarchy data was not considering active instances [\#551](https://github.com/pypeclub/pype/pull/551) + +### [2.12.1](https://github.com/pypeclub/pype/tree/2.12.1) + +_**release date:** 2020-09-15_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.12.0...2.12.1) + +**Fixed bugs:** + +- dependency security alert! [\#484](https://github.com/pypeclub/pype/issues/484) +- Maya: RenderSetup is missing update [\#106](https://github.com/pypeclub/pype/issues/106) +- extract effects creates new instance [\#78](https://github.com/pypeclub/pype/issues/78) + + + + +## [2.12.0](https://github.com/pypeclub/pype/tree/2.12.0) ## + +_**release date:** 09 Sept 2020_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.8...2.12.0) + +**Enhancements:** + +- Pype now uses fewer mongo connections [\#509](https://github.com/pypeclub/pype/pull/509) +- Nuke: adding image loader [\#499](https://github.com/pypeclub/pype/pull/499) +- Completely new application launcher [\#443](https://github.com/pypeclub/pype/pull/443) +- Maya: Optional skip review on renders.
[\#441](https://github.com/pypeclub/pype/pull/441) +- Ftrack: Option to push status from task to latest version [\#440](https://github.com/pypeclub/pype/pull/440) +- Maya: Properly containerize image plane loads. [\#434](https://github.com/pypeclub/pype/pull/434) +- Option to keep the review files. [\#426](https://github.com/pypeclub/pype/pull/426) +- Maya: Isolate models during preview publishing [\#425](https://github.com/pypeclub/pype/pull/425) +- Ftrack attribute group is backwards compatible [\#418](https://github.com/pypeclub/pype/pull/418) +- Maya: Publishing of tile renderings on Deadline [\#398](https://github.com/pypeclub/pype/pull/398) +- Slightly better logging gui [\#383](https://github.com/pypeclub/pype/pull/383) +- Standalonepublisher: editorial family features expansion [\#411](https://github.com/pypeclub/pype/pull/411) + +**Fixed bugs:** + +- Maya: Fix tile order for Draft Tile Assembler [\#511](https://github.com/pypeclub/pype/pull/511) +- Remove extra dash [\#501](https://github.com/pypeclub/pype/pull/501) +- Fix: strip dot from repre names in single frame renders [\#498](https://github.com/pypeclub/pype/pull/498) +- Better handling of destination during integrating [\#485](https://github.com/pypeclub/pype/pull/485) +- Fix: allow thumbnail creation for single frame renders [\#460](https://github.com/pypeclub/pype/pull/460) +- added missing argument to launch\_application in ftrack app handler [\#453](https://github.com/pypeclub/pype/pull/453) +- Burnins: Copy bit rate of input video to match quality. [\#448](https://github.com/pypeclub/pype/pull/448) +- Standalone publisher is now independent from tray [\#442](https://github.com/pypeclub/pype/pull/442) +- Bugfix/empty enumerator attributes [\#436](https://github.com/pypeclub/pype/pull/436) +- Fixed wrong order of "other" category collapsing in publisher [\#435](https://github.com/pypeclub/pype/pull/435) +- Multiple reviews were being overwritten to one. [\#424](https://github.com/pypeclub/pype/pull/424) +- Cleanup plugin fail on instances without staging dir [\#420](https://github.com/pypeclub/pype/pull/420) +- deprecated `-intra` parameter in ffmpeg replaced with new `-g` (see the sketch below) [\#417](https://github.com/pypeclub/pype/pull/417) +- Delivery action can now work with entered path [\#397](https://github.com/pypeclub/pype/pull/397)
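+
+A minimal sketch, in Python, of the flag swap behind [\#417](https://github.com/pypeclub/pype/pull/417): ffmpeg deprecated the `-intra` option, and `-g 1` (a GOP size of one, i.e. every frame becomes a keyframe) is its replacement. The file names and codec here are placeholders, not the exact arguments pype builds.
+
+```python
+import subprocess
+
+# "-g 1" forces a keyframe on every frame, which is what the
+# deprecated "-intra" flag used to do.
+subprocess.check_call([
+    "ffmpeg", "-i", "input.mov",   # hypothetical input
+    "-c:v", "libx264", "-g", "1",
+    "output.mov",                  # hypothetical output
+])
+```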
+### [2.11.8](https://github.com/pypeclub/pype/tree/2.11.8) ## + +_**release date:** 27 Aug 2020_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.7...2.11.8) + +**Fixed bugs:** + +- pyblish pype - other group is collapsed before plugins are done [\#431](https://github.com/pypeclub/pype/issues/431) +- Alpha white edges in harmony on PNGs [\#412](https://github.com/pypeclub/pype/issues/412) +- harmony image loader picks wrong representations [\#404](https://github.com/pypeclub/pype/issues/404) +- Clockify crash when response contains a symbol not allowed by UTF-8 [\#81](https://github.com/pypeclub/pype/issues/81) + + + + +### [2.11.7](https://github.com/pypeclub/pype/tree/2.11.7) ## + +_**release date:** 21 Aug 2020_ + + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.6...2.11.7) + +**Fixed bugs:** + +- Clean Up Baked Movie [\#369](https://github.com/pypeclub/pype/issues/369) +- celaction last workfile wasn't picked up correctly [\#459](https://github.com/pypeclub/pype/pull/459) + + + +### [2.11.5](https://github.com/pypeclub/pype/tree/2.11.5) ## + +_**release date:** 13 Aug 2020_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.4...2.11.5) + +**Enhancements:** + +- Standalone publisher now only groups sequence if the extension is known [\#439](https://github.com/pypeclub/pype/pull/439) + +**Fixed bugs:** + +- Logs have been disabled for editorial by default to speed up publishing [\#433](https://github.com/pypeclub/pype/pull/433) +- Various fixes for celaction [\#430](https://github.com/pypeclub/pype/pull/430) +- Harmony: invalid variable scope in validate scene settings [\#428](https://github.com/pypeclub/pype/pull/428) +- Harmony: new representation name for audio was not accepted [\#427](https://github.com/pypeclub/pype/pull/427) + + + + +### [2.11.3](https://github.com/pypeclub/pype/tree/2.11.3) ## + +_**release date:** 4 Aug 2020_ + +[Full Changelog](https://github.com/pypeclub/pype/compare/2.11.2...2.11.3) + +**Fixed bugs:** + +- Harmony: publishing performance issues [\#408](https://github.com/pypeclub/pype/pull/408) + + + + +## 2.11.0 ## + +_**release date:** 27 July 2020_ + +**new:** +- _(blender)_ namespace support [\#341](https://github.com/pypeclub/pype/pull/341) +- _(blender)_ start end frames [\#330](https://github.com/pypeclub/pype/pull/330) +- _(blender)_ camera asset [\#322](https://github.com/pypeclub/pype/pull/322) +- _(pype)_ toggle instances per family in pyblish GUI [\#320](https://github.com/pypeclub/pype/pull/320) +- _(pype)_ current release version is now shown in the tray menu [#379](https://github.com/pypeclub/pype/pull/379) + + +**improved:** +- _(resolve)_ tagging for publish [\#239](https://github.com/pypeclub/pype/issues/239) +- _(pype)_ Support publishing a subset of shots with standalone editorial [\#336](https://github.com/pypeclub/pype/pull/336) +- _(harmony)_ Basic support for palettes [\#324](https://github.com/pypeclub/pype/pull/324) +- _(photoshop)_ Flag outdated containers on startup and publish.
[\#309](https://github.com/pypeclub/pype/pull/309) +- _(harmony)_ Flag Outdated containers [\#302](https://github.com/pypeclub/pype/pull/302) +- _(photoshop)_ Publish review [\#298](https://github.com/pypeclub/pype/pull/298) +- _(pype)_ Optional Last workfile launch [\#365](https://github.com/pypeclub/pype/pull/365) + + +**fixed:** +- _(premiere)_ workflow fixes [\#346](https://github.com/pypeclub/pype/pull/346) +- _(pype)_ pype-setup does not work with space in path [\#327](https://github.com/pypeclub/pype/issues/327) +- _(ftrack)_ Ftrack delete action causes circular error [\#206](https://github.com/pypeclub/pype/issues/206) +- _(nuke)_ Priority was forced to 50 [\#345](https://github.com/pypeclub/pype/pull/345) +- _(nuke)_ Fix ValidateNukeWriteKnobs [\#340](https://github.com/pypeclub/pype/pull/340) +- _(maya)_ If camera attributes are connected, we can ignore them. [\#339](https://github.com/pypeclub/pype/pull/339) +- _(pype)_ stop appending of tools environment to existing env [\#337](https://github.com/pypeclub/pype/pull/337) +- _(ftrack)_ Ftrack timeout needs to look at AVALON\_TIMEOUT [\#325](https://github.com/pypeclub/pype/pull/325) +- _(harmony)_ Only zip files are supported. [\#310](https://github.com/pypeclub/pype/pull/310) +- _(pype)_ hotfix/Fix event server mongo uri [\#305](https://github.com/pypeclub/pype/pull/305) +- _(photoshop)_ Subset was not named or validated correctly. [\#304](https://github.com/pypeclub/pype/pull/304) + + + + + +## 2.10.0 ## + +_**release date:** 17 June 2020_ + +**new:** +- _(harmony)_ **Toon Boom Harmony** has been greatly extended to support rigging, scene build, animation and rendering workflows. [#270](https://github.com/pypeclub/pype/issues/270) [#271](https://github.com/pypeclub/pype/issues/271) [#190](https://github.com/pypeclub/pype/issues/190) [#191](https://github.com/pypeclub/pype/issues/191) [#172](https://github.com/pypeclub/pype/issues/172) [#168](https://github.com/pypeclub/pype/issues/168) +- _(pype)_ Added support for rudimentary **edl publishing** into individual shots. [#265](https://github.com/pypeclub/pype/issues/265) +- _(celaction)_ Simple **Celaction** integration has been added with support for workfiles and rendering. [#255](https://github.com/pypeclub/pype/issues/255) +- _(maya)_ Support for multiple job types when submitting to the farm. We can now render Maya or Standalone render jobs for Vray and Arnold (limited support for Arnold) [#204](https://github.com/pypeclub/pype/issues/204) +- _(photoshop)_ Added initial support for Photoshop [#232](https://github.com/pypeclub/pype/issues/232) + +**improved:** +- _(blender)_ Updated support for rigs and added support for the Layout family [#233](https://github.com/pypeclub/pype/issues/233) [#226](https://github.com/pypeclub/pype/issues/226) +- _(premiere)_ It is now possible to choose different storage root for workfiles of different task types. [#255](https://github.com/pypeclub/pype/issues/255) +- _(maya)_ Support for unmerged AOVs in Redshift multipart EXRs [#197](https://github.com/pypeclub/pype/issues/197) +- _(pype)_ Pype repository has been refactored in preparation for 3.0 release [#169](https://github.com/pypeclub/pype/issues/169) +- _(deadline)_ All file dependencies are now passed to deadline from maya to prevent premature start of rendering if caches or textures haven't been copied over yet. [#195](https://github.com/pypeclub/pype/issues/195) +- _(nuke)_ Script validation can now be made optional (see the sketch below). [#194](https://github.com/pypeclub/pype/issues/194) +- _(pype)_ Publishing can now be stopped at any time. [#194](https://github.com/pypeclub/pype/issues/194)
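+
+A minimal, hypothetical sketch of what "optional" means for a publish plugin: pyblish plugins expose `optional` and `active` attributes that pyblish GUIs render as a toggle, so artists can untick a check instead of it always blocking the publish. The class name and the `script_ok` key are placeholders, not the actual plugin from [#194](https://github.com/pypeclub/pype/issues/194).
+
+```python
+import pyblish.api
+
+
+class ValidateScriptSketch(pyblish.api.InstancePlugin):
+    """Hypothetical optional validator."""
+
+    order = pyblish.api.ValidatorOrder
+    label = "Validate Script"
+    optional = True  # shown as a checkbox in pyblish GUIs
+    active = True    # enabled by default; artists may untick it
+
+    def process(self, instance):
+        # Placeholder check; a real validator would inspect the script.
+        if not instance.data.get("script_ok", True):
+            raise ValueError("Script settings do not match the project")
+```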
+ +**fix:** +- _(pype)_ Pyblish-lite has been integrated into pype repository, plus various publishing GUI fixes. [#274](https://github.com/pypeclub/pype/issues/274) [#275](https://github.com/pypeclub/pype/issues/275) [#268](https://github.com/pypeclub/pype/issues/268) [#227](https://github.com/pypeclub/pype/issues/227) [#238](https://github.com/pypeclub/pype/issues/238) +- _(maya)_ Alembic extractor was getting wrong frame range type in certain scenarios [#254](https://github.com/pypeclub/pype/issues/254) +- _(maya)_ Attaching a render to subset in maya was not passing validation in certain scenarios [#256](https://github.com/pypeclub/pype/issues/256) +- _(ftrack)_ Various small fixes to ftrack sync [#263](https://github.com/pypeclub/pype/issues/263) [#259](https://github.com/pypeclub/pype/issues/259) +- _(maya)_ Look extraction is now able to skip invalid connections in shaders [#207](https://github.com/pypeclub/pype/issues/207) + + + + + +## 2.9.0 ## + +_**release date:** 25 May 2020_ + +**new:** +- _(pype)_ Support for **Multiroot projects**. You can now store project data on multiple physical or virtual storages and target individual publishes to these locations. For instance, renders can be stored on a faster storage than the rest of the project. [#145](https://github.com/pypeclub/pype/issues/145), [#38](https://github.com/pypeclub/pype/issues/38) +- _(harmony)_ Basic implementation of **Toon Boom Harmony** has been added. [#142](https://github.com/pypeclub/pype/issues/142) +- _(pype)_ OSX support is in public beta now. There are issues to be expected, but the main implementation should be functional. [#141](https://github.com/pypeclub/pype/issues/141) + + +**improved:** + +- _(pype)_ **Review extractor** has been completely rebuilt. It now supports granular filtering so you can create **multiple outputs** for different tasks, families or hosts. [#103](https://github.com/pypeclub/pype/issues/103), [#166](https://github.com/pypeclub/pype/issues/166), [#165](https://github.com/pypeclub/pype/issues/165) +- _(pype)_ **Burnin** generation has been extended to **support the same multi-output filtering** as the review extractor [#103](https://github.com/pypeclub/pype/issues/103) +- _(pype)_ Publishing file templates can now be specified in config for each individual family [#114](https://github.com/pypeclub/pype/issues/114) +- _(pype)_ Studio specific plugins can now be appended to pype standard publishing plugins. [#112](https://github.com/pypeclub/pype/issues/112) +- _(nukestudio)_ Reviewable clips no longer need to be previously cut, exported and re-imported to timeline. **Pype can now dynamically cut reviewable quicktimes** from continuous offline footage during publishing. [#23](https://github.com/pypeclub/pype/issues/23) +- _(deadline)_ Deadline can now correctly differentiate between staging and production pype. [#154](https://github.com/pypeclub/pype/issues/154) +- _(deadline)_ `PYPE_PYTHON_EXE` env variable can now be used to direct publishing to an explicit Python installation (see the sketch below). [#120](https://github.com/pypeclub/pype/issues/120) +- _(nuke)_ Nuke now checks for new versions of loaded data on file open. [#140](https://github.com/pypeclub/pype/issues/140) +- _(nuke)_ frame range and limit checkboxes are now exposed on write node. [#119](https://github.com/pypeclub/pype/issues/119)
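+
+A minimal sketch, assuming a farm submission script, of how `PYPE_PYTHON_EXE` could be consumed; the interpreter path is a placeholder, not a path pype prescribes.
+
+```python
+import os
+
+# Hypothetical interpreter path; fall back to plain "python" when unset.
+python_exe = os.environ.get("PYPE_PYTHON_EXE", "python")
+
+# A farm job would then run publishing with `python_exe` instead of
+# whatever "python" happens to resolve to on the render node.
+print("Publishing with:", python_exe)
+```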
+ + +**fix:** + +- _(nukestudio)_ Project Location was using backslashes which was breaking nukestudio native exporting in certain configurations [#82](https://github.com/pypeclub/pype/issues/82) +- _(nukestudio)_ Duplicity in hierarchy tags was prone to throwing publishing error [#130](https://github.com/pypeclub/pype/issues/130), [#144](https://github.com/pypeclub/pype/issues/144) +- _(ftrack)_ multiple stability improvements [#157](https://github.com/pypeclub/pype/issues/157), [#159](https://github.com/pypeclub/pype/issues/159), [#128](https://github.com/pypeclub/pype/issues/128), [#118](https://github.com/pypeclub/pype/issues/118), [#127](https://github.com/pypeclub/pype/issues/127) +- _(deadline)_ multipart EXRs were stopping review publishing on the farm. They are still not supported for automatic review generation, but the publish will go through correctly without the quicktime. [#155](https://github.com/pypeclub/pype/issues/155) +- _(deadline)_ If deadline is non-responsive it will no longer freeze host when publishing [#149](https://github.com/pypeclub/pype/issues/149) +- _(deadline)_ Sometimes deadline was trying to launch render before all the source data was copied over. [#137](https://github.com/pypeclub/pype/issues/137) +- _(nuke)_ Filepath knob wasn't updated properly. [#131](https://github.com/pypeclub/pype/issues/131) +- _(maya)_ When extracting animation, the "Write Color Set" options on the instance were not respected. [#108](https://github.com/pypeclub/pype/issues/108) +- _(maya)_ Attribute overrides for AOV only worked for the legacy render layers. Now it works for new render setup as well [#132](https://github.com/pypeclub/pype/issues/132) +- _(maya)_ Stability and usability improvements in yeti workflow [#104](https://github.com/pypeclub/pype/issues/104) + + + + + +## 2.8.0 ## + +_**release date:** 20 April 2020_ + +**new:** + +- _(pype)_ Option to generate slates from json templates. [PYPE-628] [#26](https://github.com/pypeclub/pype/issues/26) +- _(pype)_ It is now possible to automate loading of published subsets into any scene. Documentation will follow :). [PYPE-611] [#24](https://github.com/pypeclub/pype/issues/24) + +**fix:** + +- _(maya)_ Some Redshift render tokens could break publishing. [PYPE-778] [#33](https://github.com/pypeclub/pype/issues/33) +- _(maya)_ Publish was not preserving maya file extension. [#39](https://github.com/pypeclub/pype/issues/39) +- _(maya)_ Rig output validator was failing on nodes without shapes. [#40](https://github.com/pypeclub/pype/issues/40) +- _(maya)_ Yeti caches can now be properly versioned up in the scene inventory. [#40](https://github.com/pypeclub/pype/issues/40) +- _(nuke)_ Build first workfiles was not accepting jpeg sequences. [#34](https://github.com/pypeclub/pype/issues/34) +- _(deadline)_ Trying to generate ffmpeg review from multipart EXRs no longer crashes publishing. [PYPE-781] +- _(deadline)_ Render publishing is more stable in multiplatform environments. [PYPE-775] + + + + + +## 2.7.0 ## + +_**release date:** 30 March 2020_ + +**new:** + +- _(maya)_ Artist can now choose to load multiple references of the same subset at once [PYPE-646, PYPS-81] +- _(nuke)_ Option to use named OCIO colorspaces for review colour baking. [PYPS-82] +- _(pype)_ Pype can now work with `master` versions for publishing and loading.
+- _(blender)_ Added support for a basic blender workflow. We currently support `rig`, `model` and `animation` families. [PYPE-768] +- _(pype)_ Source timecode can now be used in burn-ins. [PYPE-777] +- _(pype)_ Review output profiles can now specify a delivery resolution different from the project setting [PYPE-759] +- _(nuke)_ A bookmark to the current context is now added automatically to all nuke browser windows. [PYPE-712] + +**change:** + +- _(maya)_ It is now possible to publish a camera without baking. Keep in mind that unbaked cameras can't be guaranteed to work in other hosts. [PYPE-595] +- _(maya)_ All the renders from maya are now grouped in the loader by their Layer name. [PYPE-482] +- _(nuke/hiero)_ Any publishes from nuke and hiero can now be versioned independently of the workfile. [PYPE-728] + + +**fix:** + +- _(nuke)_ Mixed slashes caused issues in the ocio config path. +- _(pype)_ Intent field in pyblish GUI was passing the label instead of the value to ftrack. [PYPE-733] +- _(nuke)_ Publishing of pre-renders was inconsistent. [PYPE-766] +- _(maya)_ Handles and frame ranges were inconsistent in various places during publishing. +- _(nuke)_ Nuke was crashing if it ran into certain missing knobs. For example DPX output missing `autocrop` [PYPE-774] +- _(deadline)_ Project overrides were not working properly with farm render publishing. +- _(hiero)_ Problems with single frame plates publishing. +- _(maya)_ Redshift RenderPass tokens were breaking render publishing. [PYPE-778] +- _(nuke)_ Build first workfile was not accepting jpeg sequences. +- _(maya)_ Multipart (Multilayer) EXRs were breaking review publishing due to FFmpeg incompatibility [PYPE-781] + + + + +## 2.6.0 ## + +_**release date:** 9 March 2020_ + +**change:** +- _(maya)_ render publishing has been simplified and made more robust. Render setup layers are now automatically added to publishing subsets and the `render globals` family has been replaced with a simple `render` [PYPE-570] +- _(avalon)_ change context and workfiles apps have been merged into one that allows both actions to be performed at the same time. [PYPE-747] +- _(pype)_ thumbnails are now automatically propagated to the asset from the last published subset in the loader +- _(ftrack)_ publishing comment and intent are now being published to the ftrack note as well as the description. [PYPE-727] +- _(pype)_ when overriding an existing version, old representations are now overwritten instead of the new ones just being appended. (to allow this behaviour, the version validator needs to be disabled. [PYPE-690]) +- _(pype)_ burnin preset has been significantly simplified. It no longer requires passing a function to each field, only the actual text template. To use this, all the current burnin PRESETS MUST BE UPDATED for all the projects. +- _(ftrack)_ credentials are now stored on a per server basis, so it's possible to switch between ftrack servers without having to log in and out. [PYPE-723] + + +**new:** +- _(pype)_ production and development deployments now have a different colour of the tray icon. Orange for Dev and Green for production [PYPE-718] +- _(maya)_ renders can now be attached to a publishable subset rather than creating their own subset. For example it is possible to create a reviewable `look` or `model` render and have it correctly attached as a representation of the subsets [PYPE-451]
+- _(maya)_ after saving the current scene into a new context (as a new shot for instance), all the scene's publishing subset data gets re-generated automatically to match the new context [PYPE-532] +- _(pype)_ we now support project specific publish, load and create plugins [PYPE-740] +- _(ftrack)_ new action that allows archiving/deleting old published versions. Users can choose how many of the latest versions to keep when the action is run. [PYPE-748, PYPE-715] +- _(ftrack)_ it is now possible to monitor and restart the ftrack event server using an ftrack action. [PYPE-658] +- _(pype)_ validator that prevents accidental overwrites of previously published versions. [PYPE-680] +- _(avalon)_ avalon core updated to version 5.6.0 +- _(maya)_ added validator to make sure that relative paths are used when publishing arnold standins. +- _(nukestudio)_ it is now possible to extract and publish the audio family from a clip in nuke studio [PYPE-682] + +**fix**: +- _(maya)_ maya set framerange button was ignoring handles [PYPE-719] +- _(ftrack)_ sync to avalon was sometimes crashing when run on an empty project +- _(nukestudio)_ publishing the same shots after they've been previously archived/deleted would result in a crash. [PYPE-737] +- _(nuke)_ slate workflow was breaking in certain scenarios. [PYPE-730] +- _(pype)_ rendering publish workflow has been significantly improved to prevent errors resulting from implicit render collection. [PYPE-665, PYPE-746] +- _(pype)_ launching an application on a non-synced project resulted in an obscure error [PYPE-528] +- _(pype)_ missing keys in burnins no longer result in an error. [PYPE-706] +- _(ftrack)_ create folder structure action was sometimes failing for project managers due to wrong permissions. +- _(Nukestudio)_ using `source` in the start frame tag could result in a wrong frame range calculation +- _(ftrack)_ sync to avalon action and event have been improved by catching more edge cases and processing them properly. + + + + +## 2.5.0 ## + +_**release date:** 11 Feb 2020_ + +**change:** +- _(pype)_ added many logs for easier debugging +- _(pype)_ review presets can now be separated between 2d and 3d renders [PYPE-693] +- _(pype)_ anatomy module has been greatly improved to allow for more dynamic publishing and faster debugging [PYPE-685] +- _(pype)_ avalon schemas have been moved from the `pype-config` to the `pype` repository, for simplification. [PYPE-670] +- _(ftrack)_ updated to the latest ftrack API +- _(ftrack)_ publishing comments now also appear in ftrack as a note on the version, with a customisable category [PYPE-645] +- _(ftrack)_ delete asset/subset action has been improved. It is now able to remove multiple entities and descendants of the selected entities [PYPE-361, PYPS-72] +- _(workfiles)_ added date field to workfiles app [PYPE-603] +- _(maya)_ old deprecated loaders have been removed in favour of a single unified reference loader (old scenes will upgrade automatically to the new loader upon opening) [PYPE-633, PYPE-697] +- _(avalon)_ core updated to 5.5.15 [PYPE-671] +- _(nuke)_ library loader is now available in nuke [PYPE-698] + + +**new:** +- _(pype)_ added pype render wrapper to allow rendering on mixed platform farms. [PYPE-634] +- _(pype)_ added `pype launch` command. It lets admins run applications with a dynamically built environment based on the given context. [PYPE-634]
+- _(pype)_ added support for extracting review sequences with burnins [PYPE-657] +- _(publish)_ users can now set an intent next to a comment when publishing. This will then be reflected on an attribute in ftrack. [PYPE-632] +- _(burnin)_ timecode can now be added to burnins +- _(burnin)_ datetime keys can now be added to burnins and anatomy [PYPE-651] +- _(burnin)_ anatomy templates can now be used in burnins. [PYPE-626] +- _(nuke)_ new validator for render resolution +- _(nuke)_ support for attaching slates to nuke renders [PYPE-630] +- _(nuke)_ png sequences were added to loaders +- _(maya)_ added maya 2020 compatibility [PYPE-677] +- _(maya)_ ability to publish and load .ASS standin sequences [PYPS-54] +- _(pype)_ thumbnails can now be published and are visible in the loader. The `AVALON_THUMBNAIL_ROOT` environment variable needs to be set for this to work [PYPE-573, PYPE-132] +- _(blender)_ base implementation of blender was added with publishing and loading of .blend files [PYPE-612] +- _(ftrack)_ new action for preparing deliveries [PYPE-639] + + +**fix**: +- _(burnin)_ more robust way of finding ffmpeg for burnins. +- _(pype)_ improved UNC path remapping when sending to the farm. +- _(pype)_ float frames sometimes made their way into the representation context in the database, breaking loaders [PYPE-668] +- _(pype)_ `pype install --force` was failing sometimes [PYPE-600] +- _(pype)_ padding in published files was sometimes calculated wrongly. It is now always read from the project anatomy instead. [PYPE-667] +- _(publish)_ comment publishing was failing in certain situations +- _(ftrack)_ multiple edge case scenario fixes in auto sync and the sync-to-avalon action +- _(ftrack)_ sync to avalon now works on empty projects +- _(ftrack)_ thumbnail update event was failing when deleting entities [PYPE-561] +- _(nuke)_ loader applies proper colorspaces from Presets +- _(nuke)_ publishing handles didn't always work correctly [PYPE-686] +- _(maya)_ assembly publishing and loading wasn't working correctly + + + + + + +## 2.4.0 ## + +_**release date:** 9 Dec 2019_ + +**change:** +- _(ftrack)_ version to status ftrack event can now be configured from Presets + - based on the preset `presets/ftrack/ftrack_config.json["status_version_to_task"]` +- _(ftrack)_ sync to avalon event has been completely re-written. It now supports most of the project management situations on ftrack including moving, renaming and deleting entities, updating attributes and working with tasks. +- _(ftrack)_ sync to avalon action has also been re-written. It is now much faster (up to 100 times depending on the project structure), has much better logging and reporting on encountered problems, and is able to handle much more complex situations. +- _(ftrack)_ sync to avalon is triggered by checking the `auto-sync` toggle on ftrack [PYPE-504] +- _(pype)_ various new features in the REST api +- _(pype)_ new visual identity used across pype +- _(pype)_ started moving all requirements to pip installation rather than vendorising them in the pype repository. Due to a few yet unreleased packages, this means that pype can temporarily only be installed in the offline mode. + +**new:** +- _(nuke)_ support for publishing gizmos and loading them as viewer processes +- _(nuke)_ support for publishing nuke nodes from backdrops and loading them back +- _(pype)_ burnins can now work with start and end frames as keys + - use the keys `{frame_start}`, `{frame_end}` and `{current_frame}` in the burnin preset to use them. [PYPS-44, PYPS-73, PYPE-602]
+- _(pype)_ option to filter logs by user and level in the logging GUI +- _(pype)_ image family added to standalone publisher [PYPE-574] +- _(pype)_ matchmove family added to standalone publisher [PYPE-574] +- _(nuke)_ validator for comparing arbitrary knobs with values from presets +- _(maya)_ option to force maya to copy textures in the new look publish rather than hardlinking them +- _(pype)_ comments from pyblish GUI are now being added to the ftrack version +- _(maya)_ validator for checking outdated containers in the scene +- _(maya)_ option to publish and load arnold standin sequences [PYPE-579, PYPS-54] + +**fix**: +- _(pype)_ burnins were not respecting the codec of the input video +- _(nuke)_ lots of various nuke and nuke studio fixes across the board [PYPS-45] +- _(pype)_ workfiles app no longer launches at application start by default [PYPE-569] +- _(ftrack)_ ftrack integration during publishing was failing under certain situations [PYPS-66] +- _(pype)_ minor fixes in REST api +- _(ftrack)_ status change event was crashing when the target status was missing [PYPS-68] +- _(ftrack)_ actions will try to reconnect if they fail for some reason +- _(maya)_ problems with fps mapping when using float FPS values +- _(deadline)_ overall improvements to deadline publishing +- _(setup)_ environment variables are now remapped on the fly based on the platform pype is running on. This fixes many issues in mixed platform environments. + + + + +## 2.3.6 ## + +_**release date:** 27 Nov 2019_ + +**hotfix**: +- _(ftrack)_ was hiding important debug logs +- _(nuke)_ crashes during workfile publishing +- _(ftrack)_ event server crashes because of signal problems +- _(muster)_ problems with muster render submissions +- _(ftrack)_ thumbnail update event syntax errors + + + + +## 2.3.0 ## + +_release date: 6 Oct 2019_ + +**new**: +- _(maya)_ support for yeti rigs and yeti caches +- _(maya)_ validator for comparing arbitrary attributes against ftrack +- _(pype)_ burnins can now show current date and time +- _(muster)_ pools can now be set in render globals in maya +- _(pype)_ Rest API has been implemented in beta stage +- _(nuke)_ LUT loader has been added +- _(pype)_ rudimentary user module has been added as preparation for user management +- _(pype)_ a simple logging GUI has been added to pype tray +- _(nuke)_ nuke can now bake input process into mov +- _(maya)_ imported models now have a selection handle displayed by default +- _(avalon)_ it is now possible to load multiple assets at once using the loader +- _(maya)_ added ability to automatically connect a yeti rig to a mesh upon loading + +**changed**: +- _(ftrack)_ event server now runs two parallel processes and is able to keep a queue of events to process.
+- _(nuke)_ task name is now added to all rendered subsets +- _(pype)_ adding more families to standalone publisher +- _(pype)_ standalone publisher now uses pyblish-lite +- _(pype)_ standalone publisher can now create review quicktimes +- _(ftrack)_ queries to ftrack were sped up +- _(ftrack)_ multiple ftrack actions have been deprecated +- _(avalon)_ avalon upstream has been updated to 5.5.0 +- _(nukestudio)_ published transforms can now be animated + +**fix**: +- _(maya)_ fps popup button didn't work in some cases +- _(maya)_ geometry instances and references in maya were losing shader assignments +- _(muster)_ muster rendering templates were not working correctly +- _(maya)_ arnold tx texture conversion wasn't respecting the colorspace set by the artist +- _(pype)_ problems with avalon db sync +- _(maya)_ ftrack was rounding FPS making it inconsistent +- _(pype)_ wrong icon names in Creator +- _(maya)_ scene inventory wasn't showing anything if a representation was removed from the database after it had been loaded to the scene +- _(nukestudio)_ multiple bugs squashed +- _(loader)_ loader was taking a long time to show all the loading actions when first launched in maya + +## 2.2.0 ## +_**release date:** 8 Sept 2019_ + +**new**: +- _(pype)_ add customisable workflow for creating quicktimes from renders or playblasts +- _(nuke)_ option to choose deadline chunk size on write nodes +- _(nukestudio)_ added option to publish soft effects (subTrackItems) from NukeStudio as subsets including LUT files. These can then be loaded in nuke or NukeStudio +- _(nuke)_ option to build a nuke script from previously published latest versions of plate and render subsets. +- _(nuke)_ nuke writes now have a deadline tab. +- _(ftrack)_ Prepare Project action can now be used for creating the base folder structure on disk and in ftrack, setting up all the initial project attributes, and it automatically prepares the `pype_project_config` folder for the given project. +- _(clockify)_ Added support for time tracking in clockify. This is currently in addition to ftrack time logs, but does not completely replace them. +- _(pype)_ any attributes in Creator and Loader plugins can now be customised using the pype preset system + +**changed**: +- nukestudio now uses workio API for workfiles +- _(maya)_ "FIX FPS" prompt in maya now appears in the middle of the screen +- _(muster)_ can now be configured with custom templates +- _(pype)_ global publishing plugins can now be configured using presets as well as host specific ones + + +**fix**: +- wrong version retrieval from path in certain scenarios +- nuke reset resolution wasn't working in certain scenarios + +## 2.1.0 ## +_release date: 6 Aug 2019_ + +A large cleanup release. Most of the changes are under the hood. + +**new**: +- _(pype)_ add customisable workflow for creating quicktimes from renders or playblasts +- _(pype)_ Added configurable option to add burnins to any generated quicktimes +- _(ftrack)_ Action that identifies what machines pype is running on. +- _(system)_ unify subprocess calls +- _(maya)_ add audio to review quicktimes +- _(nuke)_ add crop before write node to prevent overscan problems in ffmpeg +- **Nuke Studio** publishing and workfiles support +- **Muster** render manager support +- _(nuke)_ Framerange, FPS and Resolution are set automatically at startup +- _(maya)_ Ability to load published sequences as image planes +- _(system)_ Ftrack event that sets asset folder permissions based on task assignees in ftrack.
+- _(maya)_ Pyblish plugin that allows validation of maya attributes +- _(system)_ added better startup logging to tray debug, including basic connection information +- _(avalon)_ option to group published subsets to groups in the loader +- _(avalon)_ loader family filters are working now + +**changed**: +- changed multiple key attributes to unify their behaviour across the pipeline + - `frameRate` to `fps` + - `startFrame` to `frameStart` + - `endFrame` to `frameEnd` + - `fstart` to `frameStart` + - `fend` to `frameEnd` + - `handle_start` to `handleStart` + - `handle_end` to `handleEnd` + - `resolution_width` to `resolutionWidth` + - `resolution_height` to `resolutionHeight` + - `pixel_aspect` to `pixelAspect` + +- _(nuke)_ write nodes are now created inside a group with only some attributes editable by the artist +- rendered frames are now deleted from their temporary location after publishing is finished. +- _(ftrack)_ RV action can now be launched from any entity +- after publishing, only the refresh button is now available in the pyblish UI +- added a context instance to pyblish-lite so that the artist knows if a context plugin fails +- _(avalon)_ allow opening selected files using the enter key +- _(avalon)_ core updated to v5.2.9 with our forked changes on top + +**fix**: +- faster hierarchy retrieval from db +- _(nuke)_ A lot of stability enhancements +- _(nuke studio)_ A lot of stability enhancements +- _(nuke)_ now only renders a single write node on the farm +- _(ftrack)_ pype would crash when launching a project level task +- work directory was sometimes not being created correctly +- major pype.lib cleanup. Removing of unused functions, merging those that were doing the same thing and general house cleaning. +- _(avalon)_ subsets in maya 2019 weren't behaving correctly in the outliner diff --git a/website/docs/dev_build.md b/website/docs/dev_build.md new file mode 100644 index 0000000000..9523035705 --- /dev/null +++ b/website/docs/dev_build.md @@ -0,0 +1,189 @@ +--- +id: dev_build +title: Build openPYPE from source +sidebar_label: Build +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + + +To build Pype you currently need (on all platforms): + +- **[Python 3.7](https://www.python.org/downloads/)** as we are following the [vfx platform](https://vfxplatform.com). +- **[git](https://git-scm.com/downloads)** + +We use [CX_Freeze](https://cx-freeze.readthedocs.io/en/latest) to freeze the code and all dependencies. + + + + + + +More tools might be needed for installing some dependencies (for example for **OpenTimelineIO**) - mostly +development tools like [CMake](https://cmake.org/) and [Visual Studio](https://visualstudio.microsoft.com/cs/downloads/). + +### Clone repository: +```sh +git clone --recurse-submodules git@github.com:pypeclub/pype.git +``` + +### Run from source + +For development purposes it is possible to run OpenPype directly from the source. We provide a simple launcher script for this. + +To start OpenPype from source you need to: + +1) Run `.\tools\create_env.ps1` to create a virtual environment in `.\venv` +2) Run `.\tools\run_tray.ps1`. If you have all required dependencies on your machine, you should be greeted with the OpenPype Igniter window and, once you give it your Mongo URL, with the OpenPype icon in the system tray.
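+
+For reference, the whole run-from-source flow on Windows is just those two commands (a sketch, assuming you start in the repository root with Python 3.7 and git available):
+
+```sh
+# create the virtual environment in .\venv
+.\tools\create_env.ps1
+
+# start the tray; the Igniter window asks for your Mongo URL on first run
+.\tools\run_tray.ps1
+```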
+ + +### To build openPype: + +1) Run `.\tools\create_env.ps1` to create a virtual environment in `.\venv` +2) Run `.\tools\build.ps1` to build pype executables in `.\build\` + +To create distributable openPype versions, run `./tools/create_zip.ps1` - that will +create a zip file named `pype-vx.x.x.zip`, parsed from the current pype repository, and +copy it to the user data dir. You can specify `--path /path/to/zip` to force it into a different +location. This can be used to prepare new version releases for artists in the studio environment +without the need to re-build the whole package. + + + + + + +To build pype on linux you will need: + +- **[curl](https://curl.se)** on systems that don't have it preinstalled. +- Python header files installed (**python3-dev** on Ubuntu for example). +- **bzip2**, **readline**, **sqlite3** and other libraries. + +Because some Linux distros come with a newer Python version pre-installed, you might +need to install version **3.7** and make use of it explicitly. +Your best bet is probably using [pyenv](https://github.com/pyenv/pyenv). + +You can use your package manager to install **git** and other packages to your build +environment. +Use curl for the pyenv installation. + +:::note Install build requirements for **Ubuntu** + +```sh +sudo apt-get update; sudo apt-get install --no-install-recommends make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev liblzma-dev git +``` + +In case you run into an error about `xcb` when running Pype, +you'll also need additional libraries for Qt5: + +```sh +sudo apt install qt5-default +``` +::: + +:::note Install build requirements for **CentOS** + +```sh +yum install gcc zlib-devel bzip2 bzip2-devel readline-devel sqlite sqlite-devel openssl-devel tk-devel libffi-devel git +``` + +In case you run into an error about `xcb` when running Pype, +you'll also need additional libraries for Qt5: + +```sh +sudo yum install qt5-qtbase-devel +``` + +::: + +For more information about setting up your build environment please refer to the [pyenv suggested build environment](https://github.com/pyenv/pyenv/wiki#suggested-build-environment). + +#### Common steps for all Distros + +Use pyenv to prepare the Python version for the Pype build: + +```sh +curl https://pyenv.run | bash + +# you can add those to ~/.bashrc +export PATH="$HOME/.pyenv/bin:$PATH" +eval "$(pyenv init -)" +eval "$(pyenv virtualenv-init -)" + +# reload shell +exec $SHELL + +# install Python 3.7.9 +pyenv install -v 3.7.9 + +# change path to pype 3 +cd /path/to/pype-3 + +# set local python version +pyenv local 3.7.9 + +``` + +#### To build Pype: + +1. Run `./tools/create_env.sh` to create a virtual environment in `./venv` +2. Run `./tools/build.sh` to build pype executables in `./build/` + + + + +To build pype on MacOS you will need: + +- **[Homebrew](https://brew.sh)** - the easiest way to install everything necessary. +- **[CMake](https://cmake.org/)** to build some external openPype dependencies.
+- **XCode Command Line Tools** (or some other build system) + +1) Install **Homebrew**: +```sh +/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" +``` + +2) Install **cmake**: +```sh +brew install cmake +``` + +3) Install [pyenv](https://github.com/pyenv/pyenv): +```sh +brew install pyenv +echo 'eval "$(pyenv init -)"' >> ~/.zshrc +pyenv init +exec "$SHELL" +PATH=$(pyenv root)/shims:$PATH +``` + +4) Pull in the required Python version 3.7.x: +```sh +# install Python build dependencies +brew install openssl readline sqlite3 xz zlib + +# replace with an up-to-date 3.7.x version +pyenv install 3.7.9 +``` + +5) Set the local Python version: +```sh +# switch to the Pype source directory +pyenv local 3.7.9 +``` + +#### To build Pype: + +1. Run `./tools/create_env.sh` to create a virtual environment in `./venv` +2. Run `./tools/build.sh` to build Pype executables in `./build/` + + + diff --git a/website/docs/dev_contribute.md b/website/docs/dev_contribute.md new file mode 100644 index 0000000000..6655ec88c1 --- /dev/null +++ b/website/docs/dev_contribute.md @@ -0,0 +1,85 @@ +--- +id: dev_contribute +title: Contribute to openPype development +sidebar_label: Contribute +--- + +## What should you do if ... + +### You found a bug. + +1. Check the issues and our [bug triage](https://github.com/pypeclub/pype/projects/2) to make sure it wasn't reported already. +2. Ask on our [discord](http://pype.community/chat). Often, what appears as a bug might be the intended behavior for someone else. +3. Create a new issue. +4. Please use the issue template. + + +### You wrote a patch that fixes a bug. + +- Open a new GitHub pull request with the patch. +- Ensure the PR description clearly describes the problem and solution. Include the relevant issue number if applicable. + + +### You intend to add a new feature or change an existing one. + +- Open a new thread in the [github discussions](https://github.com/pypeclub/pype/discussions/new) +- Do not open an issue until the suggestion is discussed. We will convert accepted suggestions into the backlog and point them to the relevant discussion thread to keep the context. + +### You have questions about the source code. + +Open a new question on [github discussions](https://github.com/pypeclub/pype/discussions/new) + + +## Branching Strategy + +As we move to 3.x as the primary supported version of OpenPype and only keep Pype 2.15 on bugfixes and client sponsored feature requests, we need to be very careful with our merging strategy. + +These are the important branches to remember. + +### OpenPype 3.x + +**`main`** - Production branch with stable releases + +**`develop`** - Development branch where we merge all PRs during the development + +**`release/3.x.x`** - Testing branch for a release. Once a release branch is created, no new features +are accepted for the given release. Bugfixes, however, are expected. Once the branch is stable it is +merged to `main` and `develop`, and `main` is tagged with a new release. + +**`feature/{Issue#}-{Issue_name}`** - development of new features + +**`bugfix/{Issue#}-{Issue_name}`** - bug fixes + +**`hotfix/{Issue#}-{Issue_name}`** - production critical hotfixes (always created from `main`) + +### OpenPype 2.x + +Branching is identical to 3.x development, however all the branches should be prefixed with +the `2.x/` namespace, for example `2.x/feature/1025-support_exporting_of_alembic` or `2.x/bugfix/wrong_colourspace_in_maya`.
Main and develop for 2.x development are `2.x/main` and `2.x/develop`. + + +A few important notes about 2.x and 3.x development: + +- 3.x features are not backported to 2.x unless specifically requested. +- 3.x bugs and hotfixes can be ported to 2.x if they are relevant or severe. +- 2.x features and bugs MUST be ported to 3.x at the same time. + +## Pull Requests + +- Each 2.x PR MUST have a corresponding 3.x PR in GitHub. Without a 3.x PR, 2.x features will not be merged! Luckily most of the code is compatible, albeit sometimes in a different place after the refactoring. Porting from 2.x to 3.x should be really easy. +- Please keep the corresponding 2 and 3 PR names the same so they can be easily identified from the PR list page. +- Each 2.x PR should be labeled with the `2.x-dev` label. + +Inside each PR, put a link to the corresponding PR. + +Of course if you want to contribute, feel free to make a PR to only 2.x/develop or develop, based on what you are using. While reviewing the PRs, we might convert the code to a corresponding PR for the other release ourselves. + +We might also change the target of your PR to an intermediate branch, rather than `develop`, if we feel it requires some extra work on our end. That way, we preserve all your commits so you don't lose out on the contribution credits. + + + + +If a PR is targeted at a 2.x release it must be labelled with the `2.x-dev` label in GitHub. diff --git a/website/docs/dev_requirements.md b/website/docs/dev_requirements.md new file mode 100644 index 0000000000..bbf3b1fb5b --- /dev/null +++ b/website/docs/dev_requirements.md @@ -0,0 +1,131 @@ +--- +id: dev_requirements +title: Requirements +sidebar_label: Requirements +--- + + +We aim to closely follow the [**VFX Reference Platform**](https://vfxplatform.com/). + +OpenPype is written in Python 3, with specific elements still running in Python 2 until all DCCs are fully updated. To see the list of those that are not quite there yet, go to the [VFX Python3 tracker](https://vfxpy.com/). + +The main things you will need to run and build pype are: + +- **Terminal** in your OS + - PowerShell 5.0+ (Windows) + - Bash (Linux) +- [**Python 3.7.8**](#python) or higher +- [**MongoDB**](#database) + + +## OS + +It can be built and run on all common platforms. We develop and test on the following: + +- **Windows** 10 +- **Linux** + - **Ubuntu** 20.04 LTS + - **CentOS** 7 +- **Mac OSX** + - **10.15** Catalina + - **11.1** Big Sur (using Rosetta2) + + +## Database + +Pype needs a site-wide installation of **MongoDB**. It should be installed on a +reliable server that all workstations (and possibly render nodes) can connect to. This +server holds the **Avalon** database that is at the core of everything. + +Depending on the project size and the number of artists working, connection speed and +latency influence the performance experienced by artists. If remote working is required, this mongodb +server must be accessible from the Internet, or a cloud solution can be used. A reasonable backup plan +or high-availability options are recommended. The *Replication* feature of MongoDB should be considered. This is beyond the +scope of this documentation; please refer to the [MongoDB Documentation](https://docs.mongodb.com/manual/replication/). + +Pype can run its own instance of mongodb, mostly for testing and development purposes. +For that it uses a locally installed MongoDB.
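+
+As a sketch, starting such a throwaway local MongoDB instance manually could look like this (the data directory and port below are placeholders, not OpenPype defaults):
+
+```sh
+# create a data directory and start a local MongoDB on the default port
+mkdir -p ~/openpype_mongo_data
+mongod --dbpath ~/openpype_mongo_data --port 27017
+```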
Download it from the [MongoDB website](https://www.mongodb.com/download-center/community), install it and +add it to the `PATH`. On Windows, Pype tries to find it in the standard installation destination or using `PATH`. + +To run MongoDB on a server, use your server distribution tools to set it up (on Linux). + +## Python + +**Python 3.7.8** is the recommended version to use (as per [VFX platform CY2021](https://vfxplatform.com/)). + +If you're planning to run openPYPE on workstations from built executables (highly recommended), you will only need Python for building and development; however, if you'd like to run from source centrally, every user will need Python installed. + +## Hardware + +openPYPE should be installed on all workstations that need to use it, the same as any other application. + +There are no specific requirements for the hardware. If the workstation can run +the major DCCs, it most probably can run openPYPE. + +Installed, it takes around 400MB of space, depending on the platform. + + +For a well-functioning ftrack event server, we recommend a linux virtual server with Ubuntu or CentOS. CPU and RAM allocation needs differ based on the studio size, but 2GB of RAM, a dual-core CPU and around 4GB of storage should suffice. + + +## Deployment + +For pushing pipeline updates to the artists, you will need to create a shared folder that +will be accessible with at least Read permission to every OpenPype user in the studio. +This can also be hosted on the cloud in fully distributed deployments. + + + +## Dependencies + +### Key projects we depend on + +- [**Avalon**](https://github.com/getavalon) +- [**Pyblish**](https://github.com/pyblish) +- [**OpenTimelineIO**](https://github.com/PixarAnimationStudios/OpenTimelineIO) +- [**OpenImageIO**](https://github.com/OpenImageIO/oiio) +- [**FFmpeg**](https://github.com/FFmpeg/FFmpeg) + + +### Python modules we use and their licenses + +| Package | License | +|-------------------------------------|--------------------------------------------------------------| +| acre 1.0.0 | GNU Lesser General Public License v3 (LGPLv3) | +| aiohttp 3.7.3 | Apache 2 | +| aiohttp-json-rpc 0.13.3 | Apache 2.0 | +| appdirs 1.4.4 | MIT | +| blessed 1.17.12 | MIT | +| click 7.1.2 | BSD-3-Clause | +| clique 1.5.0 | Apache License (2.0) | +| coverage 5.3.1 | Apache 2.0 | +| cx-Freeze 6.5.1 | Python Software Foundation License | +| docutils 0.16 | public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt) | +| flake8 3.8.4 | MIT | +| ftrack-python-api 2.0.0 | Apache License (2.0) | +| jinxed 1.0.1 | MPLv2.0 | +| log4mongo 1.7.0 | BSD | +| OpenTimelineIO 0.14.0.dev1 | Modified Apache 2.0 License | +| Pillow 8.1.0 | HPND | +| pyblish-base 1.8.8 | LGPL | +| pycodestyle 2.6.0 | Expat license | +| pydocstyle 5.1.1 | MIT | +| pylint 2.6.0 | GPL | +| pymongo 3.11.2 | Apache License, Version 2.0 | +| pynput 1.7.2 | LGPLv3 | +| PyQt5 5.15.2 | GPL v3 | +| pytest 6.2.1 | MIT | +| pytest-cov 2.11.0 | MIT | +| pytest-print 0.2.1 | MIT | +| pywin32-ctypes 0.2.0 | BSD | +| Qt.py 1.3.2 | MIT | +| six 1.15.0 | MIT | +| speedcopy 2.1.0 | UNKNOWN | +| Sphinx 3.4.3 | BSD | +| sphinx-qt-documentation 0.3 | BSD-3-Clause | +| sphinxcontrib-websupport 1.2.4 | BSD | +| tqdm 4.56.0 | MPLv2.0, MIT Licences | +| wheel 0.36.2 | MIT | +| wsrpc-aiohttp 3.1.1 | Apache Software License | diff --git a/website/docs/features.md b/website/docs/features.md new file mode 100644 index 0000000000..c6413a7aa7 --- /dev/null +++ b/website/docs/features.md @@ -0,0 +1,261 @@ +## Pype tray + +Ftrack + +Login +
+Reset Action server + +Launcher: Launch applications without the need of going through the ftrack website + +Library: Browse through all the published assets across the projects. You can also launch actions. + +Standalone Publisher + +Services + +Idle manager + +Timers manager + +Statics server + +## System Admin + +Manage environments per project/shot/tasks + +Centralized pipeline installation + +Localized Python environment (for speed purposes) + +Automatic user environment updates (online/offline) + +Ability to run completely offline for TPN and MPAA certified sites + +Git controlled deployment + +Separated development and production installation for safety and testing + +Per project pipeline configuration overrides + +Linux, Windows, Mac support + +MongoDB backbone + +## Ftrack + +Launch applications + +Custom actions + +Create base project structure + +Create Folders + +Sync to Avalon + +Propagate Thumbnails + +Create required custom attributes + +Launch version in RV / DJV View + +Delete assets and subsets + +Sort Client Review + +Kill old Ftrack jobs + +Event server (automatically triggered actions) + +Sync to Avalon + +Update status on the next task + +Propagate Thumbnails from version to tasks and assets/shots + +Propagate statuses between versions and tasks + +Avalon <-> Ftrack sync + +## Maya + +### Tools + +Creator + +Publisher + +Loader + +Scene Inventory + +Look assigner + +Workfiles + +### Families + +Model + +Look + +Rig + +Animation + +Cache + +Camera + +Assembly + +MayaAscii (generic scene) + +Setdress + +RenderSetup + +Review + +arnoldStandin + +vrayProxy + +vrayScene + +yetiCache + +yetiRig + +## Houdini + +### Tools + +Creator + +Publisher + +Loader + +Scene Inventory + +Look assigner + +Workfiles + +### Families + +Model + +Animation + +Cache + +Camera + +Review + +## Nuke + +### Tools + +Publisher + +Loader + +Scene Inventory + +Workfiles + +### Families + +Model (load only) + +Camera (load only) + +Render + +Review + +Plate + +Prerender + +## NukeStudio + +Create Shots in Ftrack and Avalon + +handles + +frame ranges + +edit in and edit out + +Publish Plates + +Any number of plates + +colorspace managed + +Attach preview quicktimes to Ftrack versions + +## Fusion + +### Tools + +Publisher + +Loader + +Scene Inventory + +Workfiles + +### Families + +Model (load only) + +Camera (load only) + +Render + +Review + +Plate + +Prerender + +## Deadline + +Publish to Deadline from + +Maya + +Nuke + +Create preview quicktimes from rendered frames + +Publish rendered outputs to Avalon and Ftrack + +## Muster + +Publish to Muster from + +Maya + +Nuke + +Create preview quicktimes from rendered frames + +Publish rendered outputs to Avalon and Ftrack + +## Clockify + +Automatic timer start and stop in sync with Ftrack.
+ +## Arnold + +## Vray + +## Redshift diff --git a/website/docs/hosts-maya.md b/website/docs/hosts-maya.md new file mode 100644 index 0000000000..0ee0c2d86b --- /dev/null +++ b/website/docs/hosts-maya.md @@ -0,0 +1,33 @@ +### Tools +Creator +Publisher +Loader +Scene Inventory +Look assigner +Workfiles + +### Plugins +Deadline +Muster +Yeti +Arnold +Vray +Redshift + +### Families +Model +Look +Rig +Animation +Cache +Camera +Assembly +MayaAscii (generic scene) +Setdress +RenderSetup +Review +arnoldStandin +vrayProxy +vrayScene +yetiCache +yetiRig diff --git a/website/docs/manager_ftrack.md b/website/docs/manager_ftrack.md new file mode 100644 index 0000000000..1583e74db2 --- /dev/null +++ b/website/docs/manager_ftrack.md @@ -0,0 +1,62 @@ +--- +id: manager_ftrack +title: Ftrack +sidebar_label: Project Manager +--- + +Ftrack is currently the main project management option for OpenPype. This documentation assumes that you are familiar with Ftrack and its basic principles. If you're new to Ftrack, we recommend having a thorough look at the [Ftrack Official Documentation](http://ftrack.rtd.ftrack.com/en/stable/). + +## Project management +Setting project attributes is the key to a properly working pipeline. + +### New Project +The best practice for creating a new project ready for OpenPype: +1. First of all you must [create a project](http://ftrack.rtd.ftrack.com/en/stable/using/managing_projects/creating_a_new_project.html) in Ftrack. +2. All the required attributes need to be populated. The easiest way to do it is by using the [Prepare Project](manager_ftrack_actions#prepare-project) action. + +:::tip +Do not forget to set up `applications` and `tools`, otherwise users won't be able to launch applications. +::: + +3. Now you can create the project hierarchy with shots, assets, tasks and others, which have [specific rules](#synchronization-rules). The [Create Project Structure](manager_ftrack_actions#create-project-structure) action may help you with this step. +4. The last step is to [synchronize](#synchronization-to-avalon-database) the project to the Avalon database. + +:::tip +Turn on the `auto-sync` attribute on your project in ftrack. That way you'll only need to synchronise the project once and all further changes will be propagated automatically. +::: + +## Synchronization to Avalon database +This process describes how data from Ftrack gets into the Avalon database. + +### How to synchronize +You can trigger synchronization manually using the [Sync To Avalon](manager_ftrack_actions#sync-to-avalon) action. + +Synchronization can also be automated with OpenPype's [event server](#event-server) and synchronization events. If your Ftrack is [prepared for OpenPype](#prepare-ftrack-for-openpype), the project should have the custom attribute `Avalon auto-sync`. Check the custom attribute to allow auto-updates with the event server. + +:::tip +Always use the `Sync To Avalon` action before you enable `Avalon auto-sync`! +::: + +:::important +Synchronization actions and events can show you an interface with information when something goes differently than expected. Just read carefully what happened; the messages should guide you. +::: + +### Synchronization rules +Required: +- entity names can only contain **letters**, **numbers**, **periods** and **underscore** symbols
*(in technical terms: all names must match the regex `^[a-zA-Z0-9_.]*$`)* + +Not allowed: +- duplicated entity names within a project (there can be only one shot with the name "sh0010" in the whole project, for example) +- having any **Tasks** directly on the *Project* level + +### Managing Entities + +There are certain situations that are very hard, or even impossible, to handle automatically and will have to be resolved by your TD. These include: + +- Deleting shots and assets after some data has already been published in them. +- Re-structuring the project hierarchy when work is already being done. +- Renaming the Project + +If you need to move an entity or change its name, it is possible only in the case when no-one has worked on it yet. Once work is in progress, you must archive the old one and create a new one. + +To archive entities you should use the [Archive Asset/Subset](manager_ftrack_actions#delete-asset/subset) action. This will remove the selected entity from the ftrack and avalon databases in a mostly non-destructive way, so it can be recovered later. To completely delete all traces of such an entity you'll need to go to the OpenPype archive and delete it from there. diff --git a/website/docs/manager_ftrack_actions.md b/website/docs/manager_ftrack_actions.md new file mode 100644 index 0000000000..aa4c554614 --- /dev/null +++ b/website/docs/manager_ftrack_actions.md @@ -0,0 +1,316 @@ +--- +id: manager_ftrack_actions +title: Ftrack Actions +sidebar_label: Ftrack actions +--- + +Actions are small useful tools that help artists, managers and administrators. +To avoid an overfilled action menu, some actions are filtered by entity types and some of them by user role permissions. + +In most cases actions are filtered by entity type: +- Project +- Typed Context + - Folder + - Episode + - Sequence + - Shot + - Library + - Asset Build + - Asset Variant + - Epic + - Milestone +- Task +- Asset Version +- Component +- Review Session + +*Typed Context* is a global Ftrack entity for hierarchical types, representing all of them. Hierarchical types can be used for filtering too, but because they are dynamic *(you can add, modify and remove them)*, *Typed Context* is used to be more general. + +So if you do not see an action you need to use, check if the action is available for the selected *entity type* or ask an *administrator* to check if you have permissions to use it. + +:::note +Actions can be heavily customised by your studio, so this guide might not fit 100%. +::: + +:::important +Filtering can be more complicated; for example, a lot of actions can be shown only when one particular entity is selected. +::: + +--- +## Applications + +### Launch applications +* Entity types: Task +* User roles: All + +These actions *launch the application with OpenPype* and *start a timer* for the selected Task. We recommend launching applications this way. + +:::important +A Project Manager or Supervisor must set the project's applications during project preparation, otherwise you won't see them. Applications can be added even if the project is in progress. +::: + +--- +
+
+ +## OpenPype Admin + +
+
+ +![pype_admin-icon](assets/ftrack/ftrack-pype_admin-icon.png) + +
+
+ + +#### A group of actions that are used for OpenPype Administration. + +### Sync to Avalon +* Entity types: Project, Typed Context +* User roles: Pypeclub, Administrator, Project manager + +Synchronization to Avalon is a key process to keep OpenPype data updated. The action updates selected entities (Project, Shot, Sequence, etc.) and all nested entities to the Avalon database. If the action finishes successfully, the [Sync Hier Attrs](#sync-hier-attrs) action is triggered. + +There are 2 versions of **Sync to Avalon**, the first labeled as **server**, the second as **local**. +* the **server** version will be processed with the [event server](module_ftrack#event-server) +* the **local** version will be processed with the user's OpenPype tray application + +It is recommended to use the **local** version if possible to avoid unnecessary deceleration of the event server. + +### Sync Hier Attrs +* Entity types: Project, Typed Context +* User roles: Pypeclub, Administrator, Project manager + +Synchronization of Ftrack's hierarchical Custom attributes to Avalon is a bit complicated, so we decided to split the synchronization process into 2 actions. This action updates hierarchical Custom attributes of selected entities (Project, Shot, Sequence, etc.) and all their nested entities to the pipeline database. This action is also triggered automatically after a successfully finished [Sync To Avalon](#sync-to-avalon) action. + +There are 2 versions of **Sync Hier Attrs**, the first labeled as **server**, the second as **local**. +* the **server** version will be processed with the [event server](module_ftrack#event-server) +* the **local** version will be processed with the user's OpenPype application + +It is recommended to use the **local** version if possible to avoid unnecessary deceleration of the event server. + +### Job Killer +* Entity types: All +* User roles: Pypeclub, Administrator + +Custom Jobs in Ftrack help to track the progress and status of triggered actions, but sometimes an unexpected action failure may happen *(please let us know when it happens)*. The failure will cause the job's status to remain set to **Running**, which may cause issues in the future. + +This action gives the ability to *stop running jobs*. When the action is triggered, an interface listing all running jobs with a checkbox next to each is shown. The status of checked jobs will be set to **Failure** on submit. + +### Delete Assets by Name +* Entity types: Typed Context, Task +* User roles: Pypeclub, Administrator + +With this action it's possible to delete up to 15 entities at once from the active project in the pipeline database. Entered names must match exactly the names stored in the database. These entities also must not have child entities *(a Sequence must not have Shots; a Task is not an entity in this sense)*. + +--- +
+
+ +## Prepare Project + +
+
+ +![prepare_project-icon](assets/ftrack/ftrack-prepare_project-icon.png) + +
+
+ +* Entity types: Project +* User roles: Pypeclub, Administrator, Project manager + +Allows project managers and coordinators to *set basic project attributes* needed for OpenPype to operate, to *create project folders* if you want, and especially to prepare project specific [settings](admin_settings_project). + +:::tip +It is possible to use this action during the lifetime of a project, but we recommend using it only once at the start of the project. +::: + +![prepare_project_1](assets/ftrack/ftrack-prepare_project_1-small.png) + +--- +
+
+ +## Multiple Notes + +
+
+ +![multiple_notes-icon](assets/ftrack/ftrack-multiple_notes-icon.png) + +
+
+ +* Entity types: Asset Version +* User roles: All + +You can add the same note to multiple Asset Versions at once with this action. +![multiple_notes_1](assets/ftrack/ftrack-multiple_notes_1-small.png) + +--- +
+
+ +## Delete Asset/Subset + +
+
+ +![delete_asset-icon](assets/ftrack/ftrack-delete_asset-icon.png) + +
+
+ +* Entity types: Typed Context, Task +* User roles: Pypeclub, Administrator + +The action deletes Entities and Asset Versions from the Ftrack and Avalon databases. + +You should use this action if you need to delete Entities or Asset Versions, otherwise the deletion will not take effect in the Avalon database. Currently the action only allows deleting one entity at a time. The entity also must not have any children. + +--- +
+
+ +## Create Project Structure + +
+
+ +![create_project_folders-icon](assets/ftrack/ftrack-create_project_folders-icon.png) + +
+
+ +* Entity types: Project +* User roles: Pypeclub, Administrator + +*Create Project Structure* helps to create the basic folder structure and may create the main ftrack entities for the project. + +The structure is loaded from settings *(OpenPype Settings → Project → Global → Project Folder Structure)*. You should examine these settings to see how it works; an illustrative sketch follows below. Settings may contain dictionaries of nested dictionaries where each key represents a folder name. A key and all its parents will also be created in Ftrack if the key ends with `[ftrack]`. The default Ftrack entity type is *Folder*, but the entity type can be specified using `[ftrack.{entity type}]`. To create a *Sequence* named *Seq_001*, the key should look like `Seq_001[ftrack.Sequence]`. + +:::note +Please keep in mind this action is meant to make your project setup faster at the very beginning, but it does not create folders for each shot and asset. For creating asset folders refer to the `Create Folders` action. +:::
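+
+As an illustration, hypothetical folder-structure keys (not the shipped defaults) and their effect could look like this:
+
+```sh
+# "assets"                   -> folder on disk only
+# "editorial[ftrack]"        -> folder on disk + Ftrack entity of type Folder
+# "Seq_001[ftrack.Sequence]" -> folder on disk + Ftrack entity of type Sequence
+#
+# on disk, the action would then create the equivalent of:
+mkdir -p /path/to/project/{assets,editorial,Seq_001}
+```
+
+---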
+
+ +## Delivery + +
+
+ +![ftrack-delivery-icon](assets/ftrack/ftrack-delivery-icon.png) + +
+
+ +* Entity types: Task +* User roles: Pypeclub, Project manager, Administrator + +Collects approved hi-res files and copies them into a folder. It takes any components of any versions and copies and renames them correctly. + + +--- +
+
+ +## Create Folders + +
+
+ +![create_folders-icon](assets/ftrack/ftrack-create_folders-icon.png) + +
+
+ +* Entity types: Typed Context, Task +* User roles: All + +It is usually not necessary to launch this action because folders are created automatically every time you start working on a task. However, it can be handy if you need to create folders before any work begins or you want to use applications that don't have a pipeline implementation. + +--- +
+
+ +## Thumbnail + +
+
+ +![thumbnail-icon](assets/ftrack/ftrack-thumbnail-icon.png) + +
+
+ +A group of actions for thumbnail management. + +### Thumbnail to Parent +Propagates the thumbnail of the selected entity to its parent. + +### Thumbnail to Children +Propagates the thumbnail of the selected entity to its direct child entities. + +--- +## RV +* Entity types: All +* User roles: All + +You can launch the RV player with playable components from selected entities. You can choose which components will be played. + +:::important +You must have the RV player installed and licensed, and have the correct RV environments set, to be able to use this action. +::: + +--- +## DJV View +* Entity types: Task, Asset Version +* User roles: All + +You can launch DJV View with one playable component from selected entities. You can choose which component will be played. + +:::important +You must have DJV View installed and configured in the studio config to be able to use this action. +::: + +--- +
+
+ +## Open File + +
+
+ +![component_open-icon](assets/ftrack/ftrack-component_open-icon.png) + +
+
+ +* Entity types: File Component +* User roles: All + +This action will open the folder of the selected *Component* on an *Asset Version*. + +:::warning +Does not work for components uploaded to the Ftrack Web server. +::: +![component_open_1](assets/ftrack/ftrack-component_open_1-small.png) + +:::warning +The Component's path must be accessible by the current OS. +::: + +--- +## Sort Review +* Entity types: Review Session +* User roles: All + +Helps you sort *Asset Versions* in a *Client Review Session*. + +Asset Versions are sorted by *Version number*, *Task name* and *Version name*. diff --git a/website/docs/manager_naming.md b/website/docs/manager_naming.md new file mode 100644 index 0000000000..bf822fbeb4 --- /dev/null +++ b/website/docs/manager_naming.md @@ -0,0 +1,56 @@ +--- +id: manager_naming +title: Naming Conventions +sidebar_label: Naming Conventions +--- + +:::note +This naming convention holds true for most of our pipeline. Please match it as closely as possible even for projects and files that might be outside of pipeline scope at this point. Small errors count! The reason for the given formatting is to allow people to understand the file at a glance and so that a script or a program can easily get meaningful information from your files without errors. +::: + +## General rules + +For more detailed rules and different file types, have a look at the naming conventions for scenes and assets. + +- Every file starts with a file code based on the project it belongs to, e.g. 'tst_', 'drm_' +- An optional subversion and comment always come after the major version: v##.subversion_comment. +- File names can only be composed of letters, numbers, underscores `_` and dots "." +- You can use snakeCase or CamelCase if you need more words in a section, e.g. thisIsLongerSentenceInComment +- No spaces in filenames. Ever! +- Frame numbers are always separated by a period "." +- If you're not sure use this template: + +## Work files + +**`{code}_{shot}_{task}_v001.ext`** + +**`{code}_{asset}_{task}_v001.ext`** + +**Examples:** + + prj_sh010_enviro_v001.ma + prj_sh010_animation_v001.ma + prj_sh010_comp_v001.nk + + prj_bob_modelling_v001.ma + prj_bob_rigging_v001.ma + prj_bob_lookdev_v001.ma + +:::info +In all of the examples anything enclosed in curly brackets { } is compulsory in the name. +Anything in square brackets [ ] is optional. +::: + +## Published Assets + +**`{code}_{asset}_{family}_{subset}_{version}_[comment].ext`** + +**Examples:** + + prj_bob_model_main_v01.ma + prj_bob_model_hires_v01.ma + prj_bob_model_main_v01_clothes.ma + prj_bob_model_main_v01_body.ma + prj_bob_rig_main_v01.ma + prj_bob_look_main_v01.ma + prj_bob_look_wet_v01.ma diff --git a/website/docs/module_clockify.md b/website/docs/module_clockify.md new file mode 100644 index 0000000000..f1020ab818 --- /dev/null +++ b/website/docs/module_clockify.md @@ -0,0 +1,10 @@ +--- +id: module_clockify +title: Clockify Administration +sidebar_label: Clockify +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + + diff --git a/website/docs/module_deadline.md b/website/docs/module_deadline.md new file mode 100644 index 0000000000..b4cf85abc2 --- /dev/null +++ b/website/docs/module_deadline.md @@ -0,0 +1,76 @@ +--- +id: module_deadline +title: Deadline Administration +sidebar_label: Deadline +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + + +## Preparation + +For [AWS Thinkbox Deadline](https://www.awsthinkbox.com/deadline) support, you need to set a few things up in both OpenPype and Deadline itself: + +1. 
Deploy the OpenPype executable to all nodes of the Deadline farm. See [Install & Run](admin_use) + +2. Enable the Deadline module in the [settings](admin_settings_system#deadline) + +3. Set up the *Deadline Web API service*. For more details on how to do it, see [here](https://docs.thinkboxsoftware.com/products/deadline/10.0/1_User%20Manual/manual/web-service.html). + +4. Point OpenPype to your Deadline web service URL in the [settings](admin_settings_system#deadline) + +5. Install our custom plugin, event plugin and scripts to your Deadline repository. It should be as simple as copying the content of `openPype/vendor/deadline/custom` to `path/to/your/deadline/repository/custom` + + +## Configuration + +The OpenPype integration with Deadline consists of two parts: an event listener, which gets triggered for each job and handles populating rendering jobs with the proper environment variables, and an OpenPype plugin, which handles the publishing itself. + +Both need to be configured by pointing DL to the OpenPype executables. These executables need to be installed to destinations accessible by the DL process. Check permissions (they must be executable and accessible by the Deadline process). + +- Enable `Tools > Super User Mode` in Deadline Monitor + +- Go to `Tools > Configure plugins`, find `OpenPype` in the list on the left side and set the location of the OpenPype +executable. It is recommended to use the `openpype_console` executable as it provides a bit more logging. + +- In case of multi-OS farms, provide multiple locations; each node goes through the list and tries to find an accessible location for itself. + +![Configure plugin](assets/deadline_configure_plugin.png) + +- Go to `Tools > Configure events`, find `OpenPype` in the list on the left side and set the location of the OpenPype +executable. It is recommended to use the `openpype_console` executable as it provides a bit more logging. + +- State is expected to be set to `Global Enabled` + +![Configure event](assets/deadline_configure_event.png) + +The path to the executables needs to be configured on both the plugin and the event level! + +## Troubleshooting + +#### Publishing jobs fail directly in DCCs + +- Double check that all previously described steps were finished +- Check that `deadlinewebservice` is running on the DL server +- Check that the user's machine has access to the Deadline server on the configured port + +#### Jobs are failing on the DL side + +Each publish from OpenPype consists of 2 jobs: the first one is the render, the second one is the publishing job (triggered after a successful finish of the rendering job). + +![Jobs in DL](assets/deadline_fail.png) + +- Jobs are failing with an `OpenPype executable was not found` error + + Check that OpenPype is installed on the node handling this job and that the plugin and events are properly [configured](#configuration) + +- The publishing job is failing with an `ffmpeg not installed` error + + The OpenPype executable has to have access to the `ffmpeg` executable; check OpenPype `Settings > General` + +![FFmpeg setting](assets/ffmpeg_path.png) diff --git a/website/docs/module_ftrack.md b/website/docs/module_ftrack.md new file mode 100644 index 0000000000..bd0dbaef4f --- /dev/null +++ b/website/docs/module_ftrack.md @@ -0,0 +1,216 @@ +--- +id: module_ftrack +title: Ftrack Administration +sidebar_label: Ftrack +--- + +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + + +Ftrack is currently the main project management option for OpenPype. This documentation assumes that you are familiar with Ftrack and its basic principles.
If you're new to Ftrack, we recommend having a thorough look at the [Ftrack Official Documentation](http://ftrack.rtd.ftrack.com/en/stable/). + +## Prepare Ftrack for OpenPype + +### Server URL +If you want to connect Ftrack to OpenPype you might need to make a few changes in the Ftrack settings. These changes would take a long time to do manually, so we prepared a few Ftrack actions to help you out. First, you'll need to launch OpenPype settings, enable the [Ftrack module](admin_settings_system#Ftrack), and enter the address of your Ftrack server. + +### Login +Once your server is configured, restart OpenPype and you should be prompted to enter your [Ftrack credentials](artist_ftrack#How-to-use-Ftrack-in-OpenPype) to be able to run our Ftrack actions. If you are already logged in to Ftrack in your browser, it is enough to press `Ftrack login` and it will connect automatically. + +For a more detailed step-by-step guide on how to log in to Ftrack in OpenPype, go to the [artist Ftrack login](artist_ftrack#How-to-use-Ftrack-in-OpenPype) documentation. + +You can only use our Ftrack Actions and publish to Ftrack if each artist is logged in. + + +### Custom Attributes +After successfully connecting OpenPype with your Ftrack, you can right click on any project in Ftrack and you should see a bunch of actions available. The most important one is called `OpenPype Admin` and contains multiple options inside. + +To prepare Ftrack for working with OpenPype you'll need to run [OpenPype Admin - Create/Update Avalon Attributes](manager_ftrack_actions#create-update-avalon-attributes), which creates and sets the Custom Attributes necessary for OpenPype to function. + + + +## Event Server +The Ftrack Event Server is the key to the automation of many tasks like _status change_, _thumbnail update_, _automatic synchronization to the Avalon database_ and many more. The event server should run at all times to perform the required processing, as it is not possible to catch some events retrospectively with enough certainty. + +### Running event server +There are specific launch arguments for the event server. With `openpype eventserver` you can launch the event server, but without prior preparation it will terminate immediately. The reason is that the event server requires 3 pieces of information: the _Ftrack server url_, _paths to events_ and _credentials (Username and API key)_. The Ftrack server URL and event paths are set from OpenPype's environments by default, but the credentials must be provided separately for security reasons. + + + +:::note There are 2 ways of passing your credentials to the event server. + + + + + +- **`--ftrack-user "your.username"`** : Ftrack Username +- **`--ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee"`** : User's API key +- **`--store-credentials`** : Entered credentials will be stored for the next launch with this argument _(it is not needed to enter the **--ftrack-user** and **--ftrack-api-key** args on the next launch)_ +- **`--no-stored-credentials`** : Stored credentials are loaded first, so if you want to change credentials use this argument +- `--ftrack-url "https://yourdomain.ftrackapp.com/"` : Ftrack server URL _(it is not needed to enter it if you have set `FTRACK_SERVER` in OpenPype's environments)_ +- `--ftrack-events-path "//Paths/To/Events/"` : Paths to the events folder. May contain multiple paths separated by `;`.
+
+So if you want to use OpenPype's environments, you can launch the event server for the first time with these arguments: `openpype.exe eventserver --ftrack-user "my.username" --ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee" --store-credentials`. From then on, if everything was entered correctly, you can launch the event server with `openpype.exe eventserver`.
+
+
+
+
+- `FTRACK_API_USER` - Username _("your.username")_
+- `FTRACK_API_KEY` - User's API key _("00000aaa-11bb-22cc-33dd-444444eeeee")_
+- `FTRACK_SERVER` - Ftrack server url _("https://yourdomain.ftrackapp.com/")_
+- `FTRACK_EVENTS_PATH` - Paths to events _("//Paths/To/Events/")_
+  We do not recommend this approach.
+
+
+
+:::
+
+:::caution
+We do not recommend setting your Ftrack user and API key environments in a persistent way, for security reasons. Option 1, passing them as arguments, is substantially safer.
+:::
+
+### Where to run event server
+
+We recommend running the event server on a stable server machine with the ability to connect to the Avalon database and the Ftrack web server. Best practice is to run the event server as a service, on either Windows or Linux.
+
+:::important
+The event server should **not** run more than once! It may cause major issues.
+:::
+
+### Which user to use
+
+- must have at least the `Administrator` role
+- the same user should not be used by an artist
+
+
+:::note How to create Eventserver service
+
+
+
+
+- create file:
+  `sudo vi /opt/OpenPype/run_event_server.sh`
+- add content to the file:
+```sh
+#!/usr/bin/env bash
+export OPENPYPE_DEBUG=3
+pushd /mnt/pipeline/prod/openpype-setup
+. openpype eventserver --ftrack-user "your.username" --ftrack-api-key "your-api-key"
+```
+- create service file:
+  `sudo vi /etc/systemd/system/openpype-ftrack-event-server.service`
+- add content to the service file
+
+```toml
+[Unit]
+Description=Run OpenPype Ftrack Event Server Service
+After=network.target
+
+[Service]
+Type=idle
+ExecStart=/opt/openpype/run_event_server.sh
+Restart=on-failure
+RestartSec=10s
+
+[Install]
+WantedBy=multi-user.target
+```
+
+- change file permission:
+  `sudo chmod 0755 /etc/systemd/system/openpype-ftrack-event-server.service`
+
+- enable service:
+  `sudo systemctl enable openpype-ftrack-event-server`
+
+- start service:
+  `sudo systemctl start openpype-ftrack-event-server`
+
+
+
+
+- create service file: `openpype-ftrack-eventserver.bat`
+- add content to the service file:
+```sh
+@echo off
+set OPENPYPE_DEBUG=3
+pushd \\path\to\file\
+call openpype.bat eventserver --ftrack-user "your.username" --ftrack-api-key "your-api-key"
+```
+- download and install `nssm.cc`
+- create a Windows service according to the nssm.cc manual
+- you can also run the eventserver as a standard scheduled task
+- be aware of using UNC paths
+
+
+
+:::
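+
+Once the service is created and started, a quick health check uses standard systemd tooling (nothing OpenPype-specific; adjust the unit name if yours differs):
+
+```sh
+# Confirm the unit is active and inspect its recent log output.
+sudo systemctl status openpype-ftrack-event-server
+sudo journalctl -u openpype-ftrack-event-server --since "1 hour ago"
+```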
+
+* * *
+
+## Ftrack events
+
+Events are helpers for automation. They react to Ftrack Web Server events like an entity attribute change, entity creation, etc.
+
+### Sync to Avalon
+
+Automatic [synchronization to the pipeline database](manager_ftrack#synchronization-to-avalon-database).
+
+This event updates entities on their changes in Ftrack, i.e. when a new entity is created or an existing entity is modified. An interface listing the information is shown to users when the [synchronization rules](manager_ftrack#synchronization-rules) are not met. This event may also undo changes that might break the pipeline, namely _renaming a synchronized entity_ and _moving a synchronized entity in the hierarchy_.
+
+:::important
+Entity deletion in Ftrack is by default not processed, for security reasons _(to delete an entity, use the [Delete Asset/Subset action](manager_ftrack_actions#delete-asset-subset))_.
+:::
+
+### Synchronize Hierarchical and Entity Attributes
+
+Auto-synchronization of hierarchical attributes from Ftrack entities.
+
+Related to the [Synchronize to Avalon database](manager_ftrack#synchronization-to-avalon-database) event _(without it, it makes no sense to use this event)_. Hierarchical attributes must be synchronized in a special way, so we split synchronization into 2 parts. There are [synchronization rules](manager_ftrack#synchronization-rules) for hierarchical attributes that must be met, otherwise an interface with messages about the unmet conditions is shown to the user.
+
+### Update Hierarchy thumbnails
+
+Pushes thumbnails from a version up through multiple hierarchy levels.
+
+### Update status on task action
+
+Changes the status of the next task from `Not started` to `Ready` when the previous task is approved.
+
+Multiple detailed rules for the next-task update can be configured in the settings.
+
+### Delete Avalon ID from new entity
+
+Is used to remove the value of the `Avalon/Mongo Id` Custom Attribute when an entity is created.
+
+The `Avalon/Mongo Id` Custom Attribute stores the id of synchronized entities in the pipeline database. When a user _Copy β†’ Paste_ a selection of entities to create a similar hierarchy of entities, the values of the Custom Attributes are copied too. That causes issues during synchronization, because there are multiple entities with the same `Avalon/Mongo Id` value. To avoid this error we preventively remove these values when an entity is created.
+
+### Sync status from Task to Parent
+
+List of parent object types where this is triggered ("Shot", "Asset build", etc.; skipped if the list is empty).
+
+### Sync status from Version to Task
+
+Updates the Task status based on status changes on its Asset Version.
+
+The issue this solves is that when an Asset Version's status is changed, the artist assigned to the Task is looking at the task status, thus not noticing the review.
+
+This event makes sure Asset Version statuses get synced to the version's task. After a status change on a version, this event first tries to set an identical status on the version's parent (usually a task). This behavior can be tweaked in the settings.
+
+### Sync status on first created version
+
+This event handler allows setting a different status on the first created Asset Version in Ftrack.
+
+This is useful, for example, if the first published version doesn't contain any actual reviewable work and is only used for a roundtrip conform check, in which case this version could receive the status `pending conform` instead of the standard `pending review`.
+
+### Update status on next task
+Changes the status of the next task, by task type order, when a task's status changes to a "Done" state. The next task's status change follows a From β†’ To mapping, and some statuses can be ignored.
diff --git a/website/docs/module_muster.md b/website/docs/module_muster.md
new file mode 100644
index 0000000000..28c4b33aa8
--- /dev/null
+++ b/website/docs/module_muster.md
@@ -0,0 +1,10 @@
+---
+id: module_muster
+title: Muster Administration
+sidebar_label: Muster
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+
diff --git a/website/docs/module_site_sync.md b/website/docs/module_site_sync.md
new file mode 100644
index 0000000000..1b728e151a
--- /dev/null
+++ b/website/docs/module_site_sync.md
@@ -0,0 +1,10 @@
+---
+id: module_site_sync
+title: Site Sync Administration
+sidebar_label: Site Sync
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+
diff --git a/website/docs/project_settings/assets/global_extract_review_letter_box.png b/website/docs/project_settings/assets/global_extract_review_letter_box.png
new file mode 100644
index 0000000000..7cd9ecbdd6
Binary files /dev/null and b/website/docs/project_settings/assets/global_extract_review_letter_box.png differ
diff --git a/website/docs/project_settings/assets/global_extract_review_letter_box_settings.png b/website/docs/project_settings/assets/global_extract_review_letter_box_settings.png
new file mode 100644
index 0000000000..9ad9c05f43
Binary files /dev/null and b/website/docs/project_settings/assets/global_extract_review_letter_box_settings.png differ
diff --git a/website/docs/project_settings/assets/global_extract_review_output_defs.png b/website/docs/project_settings/assets/global_extract_review_output_defs.png
new file mode 100644
index 0000000000..0dc8329324
Binary files /dev/null and b/website/docs/project_settings/assets/global_extract_review_output_defs.png differ
diff --git a/website/docs/project_settings/assets/global_extract_review_profiles.png b/website/docs/project_settings/assets/global_extract_review_profiles.png
new file mode 100644
index 0000000000..1b91786ff6
Binary files /dev/null and b/website/docs/project_settings/assets/global_extract_review_profiles.png differ
diff --git a/website/docs/project_settings/settings_project_global.md b/website/docs/project_settings/settings_project_global.md
new file mode 100644
index 0000000000..a90e5caeef
--- /dev/null
+++ b/website/docs/project_settings/settings_project_global.md
@@ -0,0 +1,69 @@
+---
+id: settings_project_global
+title: Project Global Setting
+sidebar_label: Global
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+Project settings can have project-specific values. Each new project uses the studio values defined in the **default** project, but these values can be modified or overridden per project.
+
+:::warning Default studio values
+Projects always use the default project values unless they have a [project override](../admin_settings#project-overrides) (orange colour). Any changes in the default project may affect all existing projects.
+:::
+
+## Publish plugins
+
+Publish plugins used across all integrations.
+
+### Extract Review
+The plugin responsible for automatic FFmpeg conversion to a variety of formats.
+
+Extract Review uses profile filtering to render different outputs for different situations.
+
+**Profile filters**
+
+You can define multiple profiles for different contexts. The profile whose filters best match the current context is used. You can define a profile without filters and use it as the **default**. Only **one profile or none** will be processed per instance.
+
+All context filters are lists which may contain strings or Regular Expressions (RegEx).
+- **`hosts`** - Host from which publishing was triggered. `["maya", "nuke"]`
+- **`families`** - Main family of the processed instance. `["plate", "model"]`
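+
+As a purely illustrative sketch (profiles are edited in the settings GUI, so this JSON shape is hypothetical, not the stored format), a profile matching Maya or Nuke and any render-like family could mix plain strings with a RegEx:
+
+```json
+{
+    "hosts": ["maya", "nuke"],
+    "families": ["render.*"]
+}
+```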
+
+:::important Filtering
+Filters are optional. In case multiple profiles match the current context, a profile with a higher number of matched filters has higher priority than a profile without filters.
+:::
+
+![global_extract_review_profiles](assets/global_extract_review_profiles.png)
+
+**Output Definitions**
+
+
+A profile may generate multiple outputs from a single input. Each output must define a unique name and an output extension (use the extension without a dot, e.g. **mp4**). All other settings of an output definition are optional.
+
+![global_extract_review_output_defs](assets/global_extract_review_output_defs.png)
+- **`Tags`**
+  Define what will happen to the output.
+
+- **`FFmpeg arguments`**
+  These arguments are appended to the FFmpeg arguments auto-generated by the publish plugin. Some arguments, like rescaling or letterboxes, are handled automatically (see the command sketch at the end of this section).
+  - **Video filters** additional FFmpeg filters that would be defined in the `-filter:v` or `-vf` command line arguments.
+  - **Audio filters** additional FFmpeg filters that would be defined in the `-filter:a` or `-af` command line arguments.
+  - **Input arguments** input definition arguments of a video or image sequence - this setting has limitations, as you have to know what the input is.
+  - **Output arguments** other FFmpeg output arguments, like the codec definition.
+
+- **`Output width`** and **`Output height`**
+  - It is possible to rescale the output to a specified resolution and keep the aspect ratio.
+  - If the value is set to 0, the source resolution will be used.
+
+- **`Letter Box`**
+  - **Enabled** - Enable letter boxes
+  - **Ratio** - Ratio of letter boxes
+  - **Type** - **Letterbox** (horizontal bars) or **Pillarbox** (vertical bars)
+  - **Fill color** - Fill color of boxes (RGBA: 0-255)
+  - **Line Thickness** - Line thickness on the edge of the box (set to `0` to turn off)
+  - **Line color** - Line color on the edge of the box (RGBA: 0-255)
+  - **Example**
+
+    ![global_extract_review_letter_box_settings](assets/global_extract_review_letter_box_settings.png)
+    ![global_extract_review_letter_box](assets/global_extract_review_letter_box.png)
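+
+To make the argument fields above more concrete, here is a hedged sketch of the kind of FFmpeg command an output definition could resolve to. The file names and the auto-generated parts are illustrative assumptions, not the plugin's exact output:
+
+```sh
+# Input arguments:  -start_number 1001 -framerate 25
+# Video filters:    scale=1920:1080
+# Output arguments: -codec:v libx264 -crf 18 -pix_fmt yuv420p
+ffmpeg -start_number 1001 -framerate 25 -i render.%04d.exr \
+    -vf "scale=1920:1080" \
+    -codec:v libx264 -crf 18 -pix_fmt yuv420p review.mp4
+```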
diff --git a/website/docs/pype2/admin_anatomy.md b/website/docs/pype2/admin_anatomy.md
new file mode 100644
index 0000000000..7f6342ff78
--- /dev/null
+++ b/website/docs/pype2/admin_anatomy.md
@@ -0,0 +1,78 @@
+---
+id: admin_anatomy
+title: Project Anatomy
+sidebar_label: Folder Structure
+---
+
+## PROJECT Structure
+
+This is an example project structure when using Pype:
+
+```text
+Project
+ β”œβ”€β”€β”€assets
+ β”‚   β”œβ”€β”€β”€Bob
+ β”‚   └───...
+ └───episodes
+     └───ep01
+         └───sq01
+             └───ep01_sq01_sh001
+                 β”œβ”€β”€β”€publish
+                 └───work
+```
+
+:::note Shot naming
+We strongly recommend naming shots with their full hierarchical name. Avalon doesn't allow two assets with the same name in a project. Therefore if you have, for example:
+
+```text
+sequence01 / shot001
+```
+and then
+```text
+sequence02 / shot001
+```
+you'll run into trouble because there are now two `shot001`.
+
+A better way is to use a fully qualified name for the shot. So the above becomes:
+```text
+sequence01 / sequence01_shot001
+```
+
+This has two advantages: there will be no duplicates this way, and artists can see the whole hierarchy just by looking at the filename.
+:::
+
+## ASSET Structure
+
+```text
+Bob
+ β”œβ”€β”€β”€publish
+ β”‚   β”œβ”€β”€β”€model
+ β”‚   β”‚   β”œβ”€β”€β”€modelMain
+ β”‚   β”‚   β”œβ”€β”€β”€modelProxy
+ β”‚   β”‚   └───modelSculpt
+ β”‚   β”œβ”€β”€β”€workfile
+ β”‚   β”‚   └───taskName
+ β”‚   β”œβ”€β”€β”€rig
+ β”‚   β”‚   └───rigMain
+ β”‚   β”œβ”€β”€β”€look
+ β”‚   β”‚   β”œβ”€β”€β”€lookMain
+ β”‚   β”‚   β”‚   └───v01
+ β”‚   β”‚   β”‚       └───texture
+ β”‚   β”‚   └───lookWet
+ β”‚   β”œβ”€β”€β”€camera
+ β”‚   β”‚   β”œβ”€β”€β”€camMain
+ β”‚   β”‚   └───camLayout
+ β”‚   β”œβ”€β”€β”€cache
+ β”‚   β”‚   β”œβ”€β”€β”€cacheChar01
+ β”‚   β”‚   └───cacheRock01
+ β”‚   β”œβ”€β”€β”€vrproxy
+ β”‚   β”œβ”€β”€β”€fx
+ β”‚   └───setdress
+ └───work
+     β”œβ”€β”€β”€concept
+     β”œβ”€β”€β”€fur
+     β”œβ”€β”€β”€modelling
+     β”œβ”€β”€β”€rig
+     β”œβ”€β”€β”€look
+     └───taskName
+```
diff --git a/website/docs/pype2/admin_config.md b/website/docs/pype2/admin_config.md
new file mode 100644
index 0000000000..c753ee85a4
--- /dev/null
+++ b/website/docs/pype2/admin_config.md
@@ -0,0 +1,392 @@
+---
+id: admin_config
+title: Studio Config
+sidebar_label: Studio Config
+---
+
+All of the studio-specific configuration is stored as simple JSON files in the **pype-config** repository.
+
+The config is split into multiple sections, described below.
+
+## Anatomy
+
+Defines where and how folders and files are created for all the project data. Anatomy has two parts: **Roots** and **Templates**.
+
+:::warning
+It is recommended to create anatomy [overrides](#per-project-configuration) for each project, even if the values haven't changed. Ignoring this recommendation may cause catastrophic consequences.
+:::
+
+### Roots
+Roots define where files are stored, as paths to a shared folder. You can set them in `roots.json`.
+It is required to set a root path for each platform you are using in the studio. All paths must point to the same folder!
+```json
+{
+    "windows": "P:/projects",
+    "darwin": "/Volumes/projects",
+    "linux": "/mnt/share/projects"
+}
+```
+
+It is possible to set multiple roots when necessary. That may be handy when you need to store a specific type of data on another disk. In that case you'll have to add one more level to the JSON.
+```json
+{
+    "work": {
+        "windows": "P:/work",
+        "darwin": "/Volumes/work",
+        "linux": "/mnt/share/work"
+    },
+    "publish": {
+        "windows": "Y:/publish",
+        "darwin": "/Volumes/publish",
+        "linux": "/mnt/share/publish"
+    }
+}
+```
+The usage of multiple roots is explained below in the templates part.
+
+### Templates
+Templates define the project's folder structure and filenames. You can set them in `default.yaml`.
+
+### Required templates
+We have a few required anatomy templates for Pype to work properly; however, we keep adding more when needed.
+
+```yaml
+work:
+  folder: "{root}/{project[name]}/{hierarchy}/{asset}/work/{task}"
+  file: "{project[code]}_{asset}_{task}_v{version:0>3}<_{comment}>.{ext}"
+  path: "{root}/{project[name]}/{hierarchy}/{asset}/work/{task}/{project[code]}_{asset}_{task}_v{version:0>3}<_{comment}>.{ext}"
+
+publish:
+  folder: "{root}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version:0>3}"
+  file: "{project[code]}_{asset}_{subset}_v{version:0>3}<.{frame}>.{representation}"
+  path: "{root}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/v{version:0>3}/{project[code]}_{asset}_{subset}_v{version:0>3}<.{frame}>.{representation}"
+```
+
+Template groups `work` and `publish` must be set in all circumstances.
Both must have the keys shown: `folder` holds the path template for the directory where the files are stored, `file` holds only the filename, and `path` combines the two for quicker access.
+
+### Available keys
+| Context key | Description |
+| --- | --- |
+| root | Path to the root folder |
+| root[\<root_name\>] | Path to the root folder when multiple roots are used. Key `<root_name>` represents the root key specified in `roots.json`. |
+| project[name] | Project's full name. |
+| project[code] | Project's code. |
+| hierarchy | All hierarchical parents as subfolders. |
+| asset | Name of asset or shot. |
+| task | Name of task. |
+| version | Version number. |
+| subset | Subset name. |
+| family | Main family name. |
+| ext | File extension. (Possible to use only in the `work` template atm.) |
+| representation | Representation name. (Used instead of `ext` in all templates except `work` atm.) |
+| frame | Frame number for sequence files. |
+| output | |
+| comment | |
+
+:::warning
+Be careful about using the `root` key in templates when using multiple roots. It is not allowed to combine both `{root}` and `{root[<root_name>]}` in templates.
+:::
+:::note
+It is recommended to set padding for `version`, which is possible with an additional expression in the template. The key `{version:0>3}` will result in `001` if version `1` is published.
+**Explanation:** The expression `0>3` will add the `"0"` character to the beginning (`>` pads from the left) until the string has `3` characters.
+:::
+
+| Date-Time key | Example result | Description |
+| --- | --- | --- |
+| d | 1, 30 | Day of month in the shortest possible way. |
+| dd | 01, 30 | Day of month with 2 digits. |
+| ddd | Mon | Shortened week day name. |
+| dddd | Monday | Full week day name. |
+| m | 1, 12 | Month number in the shortest possible way. |
+| mm | 01, 12 | Month number with 2 digits. |
+| mmm | Jan | Shortened month name. |
+| mmmm | January | Full month name. |
+| yy | 20 | Shortened year. |
+| yyyy | 2020 | Full year. |
+| H | 4, 17 | Shortened 24-hour number. |
+| HH | 04, 17 | 24-hour number with 2 digits. |
+| h | 5 | Shortened 12-hour number. |
+| hh | 05 | 12-hour number with 2 digits. |
+| ht | AM, PM | Midday part. |
+| M | 0 | Shortened minutes number. |
+| MM | 00 | Minutes number with 2 digits. |
+| S | 0 | Shortened seconds number. |
+| SS | 00 | Seconds number with 2 digits. |
+
+### Optional keys
+Keys are optional when they are wrapped with `<` and `>`. It is recommended to use this only for these specific keys, for obvious reasons:
+- `output`, `comment` are optional to fill
+- `frame` is used only for sequences.
+
+For example, `v{version:0>3}<_{comment}>.{ext}` resolves to `v001_cleanup.ma` when a comment is entered, and to `v001.ma` when it is not.
+
+### Inner keys
+It is possible to use the value of one template key inside the value of another template key. This can be done only per template group, which means it is not possible to use a template key from the `publish` group inside the `work` group.
+
+Usage is similar to using template keys, but instead of `{key}` you must add `@` in front of the key: `{@key}`
+
+With this feature, the `work` template from the example above may be much easier to read and modify.
+```yaml
+work:
+  folder: "{root}/{project[name]}/{hierarchy}/{asset}/work/{task}"
+  file: "{project[code]}_{asset}_{task}_v{version:0>3}<_{comment}>.{ext}"
+  path: "{@folder}/{@file}"
+  # This is how the `path` key will look as a result
+  # path: "{root}/{project[name]}/{hierarchy}/{asset}/work/{task}/{project[code]}_{asset}_{task}_v{version:0>3}<_{comment}>.{ext}"
+```
+
+:::warning
+Be aware of unsolvable recursion in inner keys.
+```yaml
+group:
+  # Two keys referencing each other
+  key_1: "{@key_2}"
+  key_2: "{@key_1}"
+
+  # A key referencing itself
+  key_3: "{@key_3}"
+```
+:::
+
+### Global keys
+Global keys are keys whose value is set outside the template groups. All these keys will be available in each template group, with the ability to override them inside the group.
+
+**Source**
+```yaml
+# Global key outside template groups
+global_key: "global value"
+
+group_1:
+  # `global_key` is not set
+  example_key_1: "{example_value_1}"
+
+group_2:
+  # `global_key` is overridden
+  global_key: "overridden global value"
+```
+**Result**
+```yaml
+global_key: "global value"
+
+group_1:
+  # `global_key` was added
+  global_key: "global value"
+  example_key_1: "{example_value_1}"
+
+group_2:
+  # `global_key` kept its value for `group_2`
+  global_key: "overridden global value"
+```
+
+### Combine Inner keys with Global keys
+The real power of [Inner](#inner-keys) and [Global](#global-keys) keys is their combination.
+
+**Template source**
+```yaml
+# PADDING
+frame_padding: 4
+frame: "{frame:0>frame_padding}"
+# MULTIPLE ROOTS
+root_name: "root_name_1"
+root: {root[{@root_name}]}
+
+group_1:
+  example_key_1: "{@root}/{@frame}"
+
+group_2:
+  frame_padding: 3
+  root_name: "root_name_2"
+  example_key_2: "{@root}/{@frame}"
+
+group_3:
+  frame: "{frame}"
+  example_key_3: "{@root}/force_value/{@frame}"
+```
+**Equals**
+```yaml
+frame_padding: 4
+frame: "{frame:0>4}"
+root_name: "root_name_1"
+root: {root[root_name_1]}
+
+group_1:
+  frame_padding: 4
+  frame: "{frame:0>4}"
+  root_name: "root_name_1"
+  root: {root[root_name_1]}
+  # `example_key_1` result
+  example_key_1: "{root[root_name_1]}/{frame:0>4}"
+
+group_2:
+  frame_padding: 3
+  frame: "{frame:0>3}"
+  root_name: "root_name_2"
+  root: {root[root_name_2]}
+  # `example_key_2` result
+  example_key_2: "{root[root_name_2]}/{frame:0>3}"
+
+group_3:
+  frame_padding: 4
+  frame: "{frame}"
+  root_name: "root_name_1"
+  root: {root[root_name_1]}
+  # `example_key_3` result
+  example_key_3: "{root[root_name_1]}/force_value/{frame}"
+```
+
+:::warning
+Be careful about using global keys. Keep in mind that **all global keys** will be added to **all template groups**, and all inner keys in their values **MUST** be in the group.
+For example, in the [required templates](#required-templates) it seems that `path: "{@folder}/{@file}"` should be used as a global key, but that would require all template groups to have `folder` and `file` keys, which is not true by default.
+:::
+
+## Environments
+
+Here is where all the environment variables are set up. Each application has its own environment file where we set all the variables needed for it to function correctly. This is also the place where any extra in-house variables should be added. All of these individual configs are then loaded additively as needed, based on the current context.
+
+For example, when launching Pype Tray, the **Global** and **Avalon** envs are loaded first. If the studio also uses *Deadline* and *Ftrack*, both of those environments get added at the same time. This sets the base environment for the rest of the pipeline that will be inherited by all the applications launched from this point on.
+
+When a user launches an application for a task, its general and versioned env files get added to the base before the software launches. When launching *Maya 2019*, both `maya.json` and `maya_2019.json` will be added (see the sketch below).
+
+If the project or task also has extra tools configured, say *Arnold Mtoa 3.1.1*, a config JSON with the same name will be added too.
+
+This way the environment is completely dynamic, with the possibility of overrides on a granular level, from the project all the way down to the task.
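+
+As a minimal sketch of this additive loading - the variable names and values below are illustrative assumptions, not shipped defaults - `maya.json` could hold values shared by all Maya versions, while `maya_2019.json` adds version-specific ones.
+
+`maya.json`:
+```json
+{
+    "MAYA_DISABLE_CIP": "Yes"
+}
+```
+
+`maya_2019.json`:
+```json
+{
+    "STUDIO_MAYA_TOOLS": "{PYPE_STUDIO_PLUGINS}/maya/2019"
+}
+```
+
+When *Maya 2019* is launched, both files are applied in order, so the final environment contains the variables from both.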
+
+## Launchers
+
+Considering that different studios use different ways of deploying software to their workstations, we need to tell Pype how to launch all the individual applications available in the studio.
+
+Each application needs multiple files prepared for it to function correctly.
+
+```text
+application_name.toml
+application_name.bat
+application_name.sh
+```
+
+The TOML file tells Pype how to work with the application across the board: icons, the label in the GUI, *Ftrack* settings, but most importantly it defines what executable to run. These executables are stored in the windows and linux subfolders in the launchers folder. If `application_name.toml` defines that the executable to run is `application_name`, Pype assumes that `.bat` and `.sh` files under that name exist in the windows and linux folders in launchers. The correct version is picked automatically based on the platform Pype is running on.
+
+These `.bat` and `.sh` scripts then have only one job: they have to point to the exact executable path on the system, or to a command that will launch the app we want. Version granularity is up to the studio to decide. We can show artists Nuke 11.3 while specifying the particular version 11.3v4 only in the .bat file, so the artist doesn't need to deal with it, or we can present them with 11.3v4 directly. The choice is mostly between artist control and more configuration files on the system.
+
+## Presets
+
+This is where most of the functional configuration of the pipeline happens: colorspace, data types, burnin settings, geometry naming conventions, Ftrack attributes, playblast settings, types of exports and lots of other settings.
+
+Presets are categorized in folders based on what they control or what host (DCC application) they are for. We're slowly working on documenting them all, but new ones are being created regularly as well. Hopefully the categories and names are sufficiently self-explanatory.
+
+### colorspace
+
+Defines all available color spaces in the studio. These configs not only tell the system what OCIO to use, but also how exactly it needs to be applied in the given application - from loading the data, through previewing it, all the way to the rendered output.
+
+### Dataflow
+
+Defines the allowed file types and data formats across the pipeline, including their particular codec and compression settings.
+
+### Plugins
+
+All the creator, loader and publisher configurations are stored here. We can override any properties of the default plugin values and more.
+
+#### How does it work
+
+Overriding plugin properties is as simple as adding what needs to be changed to a
+JSON file along with the plugin name.
+
+Say you have a name-validating plugin:
+
+```python
+import re
+
+import pyblish.api
+import pype.api
+
+
+class ValidateModelName(pyblish.api.InstancePlugin):
+
+    order = pype.api.ValidateContentsOrder
+    hosts = ['maya']
+    families = ['model']
+    label = 'Validate Mesh Name'
+
+    # check for: 'foo_001_bar_GEO'
+    regex = r'.*_\d*_.*_GEO'
+
+    def process(self, instance):
+        # pseudocode to get nodes
+        models = get_models(instance.data.get("setMembers", None))
+        r = re.compile(self.regex)
+        for model in models:
+            m = r.match(model)
+            if m is None:
+                raise RuntimeError("invalid name on {}".format(model))
+
+```
+_This is just an illustrative, non-functional example_
+
+Instead of creating a new plugin with a different regex, you can put:
+
+```javascript
+"ValidateModelName": {
+    "regex": ".*\\d*_.*_geometry"
+}
+```
+into `repos/pype-config/presets/plugins/maya/publish.json`. There can be as many entries
+like that as there are plugins you need to override.
+
+That will effectively replace the regex defined in the plugin at runtime with the one you've just
+defined in the JSON file. This way you can change any properties defined in a plugin.
+
+:::tip loaders and creators
+A similar way exists for *Loaders* and *Creators*. Use the file `create.json` for Creators, `load.json`
+for Loaders and `publish.json` for **Pyblish** plugins like extractors, validators, etc.
+
+Preset resolution works by getting the host name (for example *Maya*) and then looking inside the
+`repos/pype-config/presets/plugins/<host>/publish.json` path. If the plugin is not found there, then
+`repos/pype-config/presets/plugins/global/publish.json` is tried.
+:::
+
+:::tip Per project plugin override
+You can override plugins per project. See [Per-project configuration](#per-project-configuration)
+:::
+
+
+## Schema
+
+Holds all the *mongoDB* database schemas that we use. In practice these are never changed on a per-studio basis; however, we included them in the config for cases where a particular project might need a very individual treatment.
+
+## Per-project configuration
+
+You can have per-project configuration with Pype. This allows you to have, for example, different
+validation requirements, file naming, etc.
+
+This is very easy to set up - point the `PYPE_PROJECT_CONFIGS` environment variable to the place
+where you want those per-project configurations. Then just create a directory with the project name and
+that's almost it. Inside, you can follow the hierarchy of the **pype-config** presets. Everything put there
+will override stuff in **pype-config**.
+
+### Example
+
+You have a project where you need to disable some validators - let's say the overlapping
+UVs validator in Maya.
+
+The project name is *FooProject*.
+Your `PYPE_PROJECT_CONFIGS` points to `/studio/pype/projects`.
+
+Create the project settings directory:
+```sh
+mkdir $PYPE_PROJECT_CONFIGS/FooProject
+```
+Now you can use plugin overrides to disable the validator:
+
+Put:
+```javascript
+{
+    "ValidateMeshHasOverlappingUVs": {
+        "enabled": false
+    }
+}
+```
+into:
+
+```sh
+$PYPE_PROJECT_CONFIGS/FooProject/presets/plugins/maya/publish.json
+```
+
+And it's done. **ValidateMeshHasOverlappingUVs** is the class name of the validator - you can
+find that name by looking into the Python file containing the validator code, or in the Pyblish GUI.
+
+That way you can make it optional or set whatever properties you want on plugins, and those
+settings will take precedence over the default site-wide settings.
diff --git a/website/docs/pype2/admin_ftrack.md b/website/docs/pype2/admin_ftrack.md
new file mode 100644
index 0000000000..d321caf870
--- /dev/null
+++ b/website/docs/pype2/admin_ftrack.md
@@ -0,0 +1,203 @@
+---
+id: admin_ftrack
+title: Ftrack Setup
+sidebar_label: Ftrack Setup
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+
+Ftrack is currently the main project management option for Pype. This documentation assumes that you are familiar with Ftrack and its basic principles. If you're new to Ftrack, we recommend having a thorough look at the [Ftrack Official Documentation](http://ftrack.rtd.ftrack.com/en/stable/).
+
+## Prepare Ftrack for Pype
+
+If you want to connect Ftrack to Pype you might need to make a few changes in the Ftrack settings. These changes would take a long time to do manually, so we prepared a few Ftrack actions to help you out. First, you'll need to launch Pype's tray application and set the [Ftrack credentials](#credentials) to be able to run our Ftrack actions.
+
+The only action that is strictly required is [Pype Admin - Create/Update Avalon Attributes](manager_ftrack_actions#create-update-avalon-attributes), which creates and sets the Custom Attributes necessary for Pype to function.
If you want to use Pype only for new projects, then you should read about the best practice for a [new project](#new-project).
+
+If you want to switch projects that are already in production, you might also need to run [Pype Doctor - Custom attr doc](manager_ftrack_actions#custom-attr-doc).
+
+:::caution
+Keep in mind that the **Custom attr doc** action will migrate certain attributes from Ftrack's default ones to our custom attributes. Some attributes will also be renamed. We make a backup of the values, but be very careful with this option and consult us before running it.
+:::
+
+## Event Server
+
+The Ftrack Event Server is the key to the automation of many tasks like _status change_, _thumbnail update_, _automatic synchronization to the Avalon database_ and many more. The event server should run at all times to perform all the required processing, as it is not possible to catch some events retrospectively with enough certainty.
+
+### Running event server
+
+There are specific launch arguments for the event server. With `$PYPE_SETUP/pype eventserver` you can launch the event server, but without prior preparation it will terminate immediately. The reason is that the event server requires 3 pieces of information: the _Ftrack server url_, _paths to events_ and _credentials (Username and API key)_. The Ftrack server URL and event paths are set from Pype's environments by default, but the credentials must be provided separately for security reasons.
+
+
+
+:::note There are 2 ways of passing your credentials to the event server.
+
+
+
+
+
+- **`--ftrack-user "your.username"`** : Ftrack Username
+- **`--ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee"`** : User's API key
+- **`--store-credentials`** : Entered credentials will be stored for the next launch with this argument _(it is not needed to enter the **--ftrack-user** and **--ftrack-api-key** args on the next launch)_
+- **`--no-stored-credentials`** : Stored credentials are loaded first, so if you want to change credentials use this argument
+- `--ftrack-url "https://yourdomain.ftrackapp.com/"` : Ftrack server URL _(not needed if you have set `FTRACK_SERVER` in Pype's environments)_
+- `--ftrack-events-path "//Paths/To/Events/"` : Paths to the events folder. May contain multiple paths separated by `;`. _(not needed if you have set `FTRACK_EVENTS_PATH` in Pype's environments)_
+
+So if you want to use Pype's environments, you can launch the event server for the first time with these arguments: `$PYPE_SETUP/pype eventserver --ftrack-user "my.username" --ftrack-api-key "00000aaa-11bb-22cc-33dd-444444eeeee" --store-credentials`. From then on, if everything was entered correctly, you can launch the event server with `$PYPE_SETUP/pype eventserver`.
+
+
+
+
+- `FTRACK_API_USER` - Username _("your.username")_
+- `FTRACK_API_KEY` - User's API key _("00000aaa-11bb-22cc-33dd-444444eeeee")_
+- `FTRACK_SERVER` - Ftrack server url _("https://yourdomain.ftrackapp.com/")_
+- `FTRACK_EVENTS_PATH` - Paths to events _("//Paths/To/Events/")_
+  We do not recommend this approach.
+
+
+
+:::
+
+:::caution
+We do not recommend setting your Ftrack user and API key environments in a persistent way, for security reasons. Option 1, passing them as arguments, is substantially safer.
+:::
+
+### Where to run event server
+
+We recommend running the event server on a stable server machine with the ability to connect to the Avalon database and the Ftrack web server. Best practice is to run the event server as a service.
+
+:::important
+The event server should **not** run more than once! It may cause major pipeline issues.
+:::
+
+### Which user to use
+
+- must have at least the `Administrator` role
+- the same user should not be used by an artist
+
+### Run Linux service - step by step
+
+1. create file:
+   `sudo vi /opt/pype/run_event_server.sh`
+
+2. add content to the file:
+
+```sh
+#!/usr/bin/env bash
+export PYPE_DEBUG=3
+pushd /mnt/pipeline/prod/pype-setup
+. pype eventserver --ftrack-user "your.username" --ftrack-api-key "your-api-key"
+```
+
+3. create service file:
+   `sudo vi /etc/systemd/system/pype-ftrack-event-server.service`
+
+4. add content to the service file
+
+```toml
+[Unit]
+Description=Run Pype Ftrack Event Server Service
+After=network.target
+
+[Service]
+Type=idle
+ExecStart=/opt/pype/run_event_server.sh
+Restart=on-failure
+RestartSec=10s
+
+[Install]
+WantedBy=multi-user.target
+```
+
+5. change file permission:
+   `sudo chmod 0755 /etc/systemd/system/pype-ftrack-event-server.service`
+
+6. enable service:
+   `sudo systemctl enable pype-ftrack-event-server`
+
+7. start service:
+   `sudo systemctl start pype-ftrack-event-server`
+
+* * *
+
+## Ftrack events
+
+Events are helpers for automation. They react to Ftrack Web Server events like an entity attribute change, entity creation, etc.
+
+### Delete Avalon ID from new entity _(DelAvalonIdFromNew)_
+
+Is used to remove the value of the `Avalon/Mongo Id` Custom Attribute when an entity is created.
+
+The `Avalon/Mongo Id` Custom Attribute stores the id of synchronized entities in the pipeline database. When a user _Copy -> Paste_ a selection of entities to create a similar hierarchy of entities, the values of the Custom Attributes are copied too. That causes issues during synchronization, because there are multiple entities with the same `Avalon/Mongo Id` value. To avoid this error we preventively remove these values when an entity is created.
+
+### Next Task update _(NextTaskUpdate)_
+
+Changes the status of the next task from `Not started` to `Ready` when the previous task is approved.
+
+Multiple detailed rules for the next-task update can be configured in the presets.
+
+### Synchronization to Avalon database _(Sync_to_Avalon)_
+
+Automatic [synchronization to the pipeline database](manager_ftrack#synchronization-to-avalon-database).
+
+This event updates entities on their changes in Ftrack, i.e. when a new entity is created or an existing entity is modified. An interface listing the information is shown to users when the [synchronization rules](manager_ftrack#synchronization-rules) are not met. This event may also undo changes that might break the pipeline, namely _renaming a synchronized entity_ and _moving a synchronized entity in the hierarchy_.
+
+:::important
+Entity deletion in Ftrack is by default not processed, for security reasons _(to delete an entity, use the [Delete Asset/Subset action](manager_ftrack_actions#delete-asset-subset))_.
+:::
+
+### Synchronize hierarchical attributes _(SyncHierarchicalAttrs)_
+
+Auto-synchronization of hierarchical attributes from Ftrack entities.
+
+Related to the [Synchronize to Avalon database](#synchronization-to-avalon-database) event _(without it, it makes no sense to use this event)_. Hierarchical attributes must be synchronized in a special way, so we split synchronization into 2 parts. There are [synchronization rules](manager_ftrack#synchronization-rules) for hierarchical attributes that must be met, otherwise an interface with messages about the unmet conditions is shown to the user.
+
+### Thumbnails update _(ThumbnailEvents)_
+
+Updates the thumbnail of a Task and its parent when a new Asset Version with a thumbnail is created.
+
+This is normally done by the Ftrack Web server when an Asset Version is created with drag&drop, but not when it is created with the Ftrack API.
+
+### Version to Task status _(VersionToTaskStatus)_
+
+Updates the Task status based on status changes on its `AssetVersion`.
+
+The issue this solves is that when an Asset Version's status is changed, the artist assigned to the Task is looking at the task status, thus not noticing the review.
+
+This event makes sure Asset Version statuses get synced to the version's task. After a status change on a version, this event first tries to set an identical status on the version's parent (usually a task). At this moment there are a few more status mappings hardcoded into the system. If the Asset Version's status was changed to:
+
+- `Reviewed`, then the Task's status will be changed to `Change requested`
+- `Approved`, then the Task's status will be changed to `Complete`
+
+
+### Update First Version status _(FirstVersionStatus)_
+
+This event handler allows setting a different status on the first created Asset Version in Ftrack.
+
+This is useful, for example, if the first published version doesn't contain any actual reviewable work and is only used for a roundtrip conform check, in which case this version could receive the status `pending conform` instead of the standard `pending review`.
+
+The behaviour can be filtered by the `name` or `type` of the task assigned to the Asset Version. The configuration can be found in the [ftrack presets](admin_presets_ftrack#first_version_status-dict).
+
+* * *
+
+## Credentials
+
+If you want to be able to use Ftrack actions with the Pype tray or the [event server](#event-server), you need to enter credentials. The credentials required for Ftrack are `Username` and `API key`.
+
+### Credentials in tray
+
+How to handle credentials in the tray is described [here](artist_ftrack#first-use-best-case-scenario).
+
+### Credentials in event server
+
+How to enter credentials into the event server is described [here](#how-to-run-event-server).
+
+### Where to find API key
+
+Please check the [official documentation](http://ftrack.rtd.ftrack.com/en/backlog-scaling-ftrack-documentation-story/developing/api_keys.html).
diff --git a/website/docs/pype2/admin_hosts.md b/website/docs/pype2/admin_hosts.md
new file mode 100644
index 0000000000..24efef7f05
--- /dev/null
+++ b/website/docs/pype2/admin_hosts.md
@@ -0,0 +1,260 @@
+---
+id: admin_hosts
+title: Hosts Setup
+sidebar_label: Hosts Setup
+---
+
+## Host configuration
+
+To add a new host application (for example, a new version of Autodesk Maya), just follow these steps:
+
+### Launchers
+
+You can find **launchers** in `repos/pype-config`. There is a bunch of **[TOML](https://en.wikipedia.org/wiki/TOML)** files and Linux and Windows shell scripts in their respective folders. The **TOML** file
+holds basic metadata information for the host application. Their naming convention is important and follows this pattern:
+
+```fix
+app_name[_version].toml
+```
+
+for example `maya_2020.toml` or `nuke_11.3.toml`. More about that later.
For now, let's look at the content of one of these files:
+
+```toml
+executable = "unreal"
+schema = "avalon-core:application-1.0"
+application_dir = "unreal"
+label = "Unreal Editor 4.24"
+ftrack_label = "UnrealEditor"
+icon = "ue4_icon"
+launch_hook = "pype/hooks/unreal/unreal_prelaunch.py/UnrealPrelaunch"
+ftrack_icon = '{}/app_icons/ue4.png'
+```
+
+* `executable` - specifies the name (without extension) of the shell script launching the application (in the windows/linux/darwin folders)
+* `schema` - not important, specifies the type of metadata
+* `application_dir` - specifies the name of the folder used in the **app** key in [anatomy templates](admin_config#anatomy)
+* `label` - name of the application to show in the launcher
+* `ftrack_label` - name under which this application is shown in ftrack actions (grouped by)
+* `icon` - application icon used in the avalon launcher
+* `launch_hook` - path to Python code to execute before the application is started (currently only from the ftrack action)
+* `ftrack_icon` - icon used in ftrack
+
+### Environments
+
+You can modify the environment variables for your application in `repos/pype-config/environments`. Those files are
+[JSON](https://en.wikipedia.org/wiki/JSON) files, loaded and processed in a somewhat hierarchical way. For example, for Autodesk Maya 2020, the file named `maya.json` is processed first and then `maya_2020.json`. The syntax is as follows:
+
+```json
+{
+    "VARIABLE": "123",
+    "NEXT_VARIABLE": "{VARIABLE}4",
+    "PLATFORMS": {
+        "windows": "set_on_windows",
+        "linux": "set_on_linux",
+        "darwin": "set_on_mac"
+    },
+    "PATHS": [
+        "path/1", "path/2", "path/3"
+    ]
+}
+```
+
+On Windows, this will result in an environment with:
+
+```sh
+VARIABLE="123"
+NEXT_VARIABLE="1234"
+PLATFORMS="set_on_windows"
+PATHS="path/1;path/2;path/3"
+```
+
+### Ftrack
+
+You need to add your new application to ftrack so it knows about it. This is done in ftrack's System Preferences,
+in `Advanced:Custom Attributes`. There you can find the `applications` attribute. It looks like this:
+
+![Ftrack - custom attributes - applications](../assets/ftrack/ftrack-custom_attrib_apps.jpg)
+
+The menu/value consists of two rows per application - the first row is the application name and the second is basically the filename of the **TOML** file mentioned above, without the `.toml` extension. After you add or modify whatever you need here, you need to add your new application to a project in ftrack. Just open the project info in ftrack, find
+**Applications** and add your new application there. If you are running the [event server](admin_ftrack#event-server), then this information is synced to avalon automatically. If not, you need to sync it manually by running the **Sync to Avalon** action.
+
+Now restart Pype and your application should be ready.
+
+### Conclusion
+
+To wrap it up:
+
+- create your shell scripts to launch the application (don't forget to set the correct OS permissions)
+- create a **TOML** file pointing to the shell scripts; set your icons and labels there
+- check or create your environment **JSON** file in `environments`, even if it is empty (`{}`)
+- to make it work with ftrack, modify **applications** in *Custom Attributes*, add it to your project and sync
+- restart Pype
+
+## Autodesk Maya
+
+[Autodesk Maya](https://www.autodesk.com/products/maya/overview) is supported out of the box and doesn't require any special setup. Even though everything should be ready to go from the start, here is the checklist to get Pype running in Maya:
+
+1. Correct executable in launchers, as explained [here](admin_config#launchers)
+2. Pype environment variables added to the `PYTHONPATH` key in the `maya.json` environment preset:
+```json
+{
+    "PYTHONPATH": [
+        "{PYPE_ROOT}/repos/avalon-core/setup/maya",
+        "{PYPE_ROOT}/repos/maya-look-assigner"
+    ]
+}
+```
+
+
+## Foundry Nuke
+
+[Foundry Nuke](https://www.foundry.com/products/nuke) is supported out of the box and doesn't require any special setup. Even though everything should be ready to go from the start, here is the checklist to get Pype running in Nuke:
+
+1. Correct executable in launchers, as explained [here](admin_config#launchers)
+2. The following environment variables in the `nuke.json` environment file (PYTHONPATH might need to be changed in different studio setups):
+
+```json
+{
+    "NUKE_PATH": [
+        "{PYPE_ROOT}/repos/avalon-core/setup/nuke/nuke_path",
+        "{PYPE_MODULE_ROOT}/setup/nuke/nuke_path",
+        "{PYPE_STUDIO_PLUGINS}/nuke"
+    ],
+    "PYPE_LOG_NO_COLORS": "True",
+    "PYTHONPATH": {
+        "windows": "{VIRTUAL_ENV}/Lib/site-packages",
+        "linux": "{VIRTUAL_ENV}/lib/python3.6/site-packages"
+    }
+}
+```
+
+
+
+## AWS Thinkbox Deadline
+
+To support [AWS Thinkbox Deadline](https://www.awsthinkbox.com/deadline) you just need to:
+
+1. Enable it in the **init_env** key of your `deploy.json` file:
+
+```json
+{
+    "PYPE_CONFIG": "{PYPE_ROOT}/repos/pype-config",
+    "init_env": ["global", "avalon", "ftrack", "deadline"]
+}
+```
+
+2. Edit `repos/pype-config/environments/deadline.json` and change `DEADLINE_REST_URL` to point to your Deadline Web API service.
+
+3. Set up the *Deadline Web API service*. For more details on how to do it, see [here](https://docs.thinkboxsoftware.com/products/deadline/10.0/1_User%20Manual/manual/web-service.html).
+
+### Pype Deadline supplement code
+
+There is some code that needs to be installed in the Deadline repository. You can find this repository overlay in
+`pype-setup/vendor/deadline`. This whole directory can be copied to your existing Deadline repository.
+
+Currently there is just the **GlobalJobPreLoad.py** script, taking care of path remapping in case of a multi-platform
+machine setup on the farm. If there is no mix of Windows/Linux machines on the farm, there is no need to use this.
+
+## Virtual Vertex Muster
+
+Pype supports rendering with [Muster](https://www.vvertex.com/). To enable it:
+
+1. Add `muster` to **init_env** in your `deploy.json` file:
+
+```json
+{
+    "PYPE_CONFIG": "{PYPE_ROOT}/repos/pype-config",
+    "init_env": ["global", "avalon", "ftrack", "muster"]
+}
+```
+
+2. Configure the URL of the Muster Web API in `repos/pype-config/environments/muster.json`. There you need to set `MUSTER_REST_URL` to the correct value.
+
+3. Enable Muster in the [tray presets](admin_presets_tools#item_usage-dict)
+
+#### Template mapping
+
+For setting up Muster templates, have a look at the [Muster Template preset](admin_presets_tools#muster-templates)
+
+:::note
+The user will be asked for their Muster login credentials during Pype startup, or any time later if the authentication token expires.
+:::
+
+
+## Clockify
+
+The [Clockify](https://clockify.me/) integration allows Pype users to seamlessly log their time into Clockify in the background. This in turn allows project managers to have a better overview of all logged times with Clockify dashboards and analytics.
+
+1. Enable Clockify: add `clockify` to **init_env** in your `deploy.json` file:
+
+```json
+{
+    "PYPE_CONFIG": "{PYPE_ROOT}/repos/pype-config",
+    "init_env": ["global", "avalon", "ftrack", "clockify"]
+}
+```
+
+2. Configure your Clockify workspace. In `repos/pype-config/environments/clockify.json`, you need to change `CLOCKIFY_WORKSPACE` to the correct value:
+
+```json
+{
+    "CLOCKIFY_WORKSPACE": "test_workspace"
+}
+```
+
+3. Enable Clockify in the [tray presets](admin_presets_tools#item_usage-dict)
+
+
+:::note
+The user will be asked for their Clockify login credentials during Pype startup.
+:::
+
+
+## Unreal Editor
+
+Pype supports [Unreal](https://www.unrealengine.com/). This support is currently tested only on the Windows platform.
+You can control Unreal's behavior by editing `repos/pype-config/presets/unreal/project_setup.json`:
+
+```json
+{
+    "dev_mode": false,
+    "install_unreal_python_engine": false
+}
+```
+
+Setting `dev_mode` to **true** will make all new projects created on tasks by Pype C++ projects. To work with those,
+you need [Visual Studio](https://visualstudio.microsoft.com/) installed.
+
+`install_unreal_python_engine` will install [20tab/UnrealEnginePython](https://github.com/20tab/UnrealEnginePython) as a plugin
+in the new project. This implies `dev_mode`. Note that **UnrealEnginePython** is compatible only with specific versions of Unreal Engine (usually not the latest one). This plugin is not needed, but it can be used alongside the *"standard"* Python support in Unreal Engine to
+extend Pype or Avalon functionality.
+
+### Unreal Engine version detection
+
+Pype tries to automatically find installed Unreal Engine versions. This relies on the [Epic Games Launcher](https://www.epicgames.com/store/en-US/).
+If you have a custom install location (for example, you've built your own version from sources), you can set
+`UNREAL_ENGINE_LOCATION` to point there. Pype then tries to find the UE version in `UE_x.xx` subfolders.
+
+### Avalon Unreal Integration plugin
+
+The Avalon/Pype integration needs the [Avalon Unreal Integration Plugin](https://github.com/pypeclub/avalon-unreal-integration). Use the `AVALON_UNREAL_PLUGIN` environment variable to point to it. When a new
+UE project is created, files are copied from this directory to the project's `Plugins` folder. If Pype detects that the plugin
+isn't already built, it will copy its source code to the new project and force `dev_mode`. In that case, you need
+**Visual Studio** to compile the plugin along with the project code.
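+
+As a small sketch - the paths below are assumptions to adjust to your setup - the two variables from the sections above could be set like this on Linux (use the system environment settings on Windows):
+
+```sh
+# Custom engine build; Pype looks for UE_x.xx style subfolders here.
+export UNREAL_ENGINE_LOCATION="/opt/unreal"
+# Sources/build of the Avalon Unreal Integration Plugin.
+export AVALON_UNREAL_PLUGIN="/studio/plugins/avalon-unreal-integration"
+```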
+
+### Dependencies
+
+The Pype integration needs:
+
+* *Python Script Plugin* enabled (done automatically)
+* *Editor Scripting Utilities* (done automatically)
+* *PySide* installed in Unreal's Python 2 (or PySide2/PyQt5 if you've built Unreal Editor with Python 3 support) (done automatically)
+* *Avalon Unreal Integration plugin* ([sources are on GitHub](https://github.com/pypeclub/avalon-unreal-integration))
+* *Visual Studio 2017* is needed to build the *Avalon Unreal Integration Plugin* and/or if you need to work in `dev_mode`
+
+### Environment Variables
+
+- `AVALON_UNREAL_EDITOR` points to the Avalon Unreal Integration Plugin sources/build
+- `UNREAL_ENGINE_LOCATION` to override Pype autodetection and point to a custom Unreal installation
+- `PYPE_UNREAL_ENGINE_PYTHON_PLUGIN` path to the optional [20tab/UnrealEnginePython](https://github.com/20tab/UnrealEnginePython) plugin
diff --git a/website/docs/pype2/admin_install.md b/website/docs/pype2/admin_install.md
new file mode 100644
index 0000000000..dad0d19b50
--- /dev/null
+++ b/website/docs/pype2/admin_install.md
@@ -0,0 +1,430 @@
+---
+id: admin_install
+title: Pype Setup
+sidebar_label: Pype Setup
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+## Introduction
+
+The general approach to Pype deployment is installing central repositories on a shared network storage which can be accessed by all artists in the studio. Simple shortcuts to launchers are then distributed to all workstations for artists to use. This approach ensures easy maintenance and updates.
+
+When an artist first runs Pype, all the required Python packages get installed automatically to their local workstation and are updated every time there is a change in the central installation.
+
+:::note
+Automatic workstation installation and updates will not work in offline scenarios. In these cases the `pype install --force --offline` command must be triggered explicitly on the workstation.
+:::
+
+## Requirements
+
+### Python 3.6+
+
+Pype requires Python 3.6 or later to be installed on each workstation running Pype.
+
+:::note
+If you want to use Pype with Blender, you need to upgrade your Python to 3.7 or higher.
+:::
+
+The Windows version of Python can easily be grabbed at [python.org](https://www.python.org/downloads/). The install location doesn't matter, but the
+Python executable should be in the `PATH` environment variable.
+
+:::important Linux
+On Linux it is somewhat different, and everything depends on the Linux distribution in use.
+
+Some Linux variants (for example *Ubuntu*) need the **python-dev** variant of the Python package, which includes Python headers and developer tools. This is needed because some of **Pype's** requirements need to compile themselves against Python during their installation. Please refer to your distribution's community to find out how to install that package.
+:::
+
+
+
+
+
+```sh
+sudo yum group install "Development Tools"
+```
+
+Python 3.6 is not part of the official distribution. The easiest way is to add it with the help of *SCL* - the Software Collections project.
+This has the advantage that it won't replace the system version of Python.
+
+```sh
+sudo yum update
+sudo yum install centos-release-scl
+```
+Now you can install Python itself:
+```sh
+sudo yum install rh-python36
+```
+
+To be able to use the installed version of Python, you must enable it in the shell:
+```sh
+scl enable rh-python36 bash
+```
+
+This will enable Python 3.6 in the currently running bash only!
+
+Check it with:
+```sh
+python --version
+```
+
+
+
+
+```sh
+sudo apt install build-essential
+```
+
+Some versions of Ubuntu already have Python 3.6 installed; check it with:
+```sh
+python3 --version
+```
+If Python shows a lower version than required, use:
+```sh
+sudo apt-get update
+sudo apt-get install python3-dev
+```
+Please be aware that even if your system already has Python 3.6, if it
+didn't come from the **python3-dev** package, Pype will most likely fail to install
+its dependencies.
+
+
+
+
+:::note Override Python detection
+You can override the autodetection of Python. This can be useful if you want to use a central network Python location or some other custom location. Just set the `PYPE_PYTHON_EXE` environment variable to point where you need.
+:::
+
+--------------
+
+### MongoDB
+
+Pype needs a site-wide installation of **MongoDB**. It should be installed on a
+reliable server that all workstations (and possibly render nodes) can connect to. This
+server holds the **Avalon** database that is at the core of everything, containing
+very important data, so it should be backed up often, and if high availability is
+needed, the *replication* feature of **MongoDB** should be considered. This is beyond the
+scope of this documentation; please refer to the [MongoDB Documentation](https://docs.mongodb.com/manual/replication/).
+
+Pype can run its own instance of **MongoDB**, mostly for testing and development purposes.
+For that it uses a locally installed **MongoDB**.
+
+Download it from the [MongoDB website](https://www.mongodb.com/download-center/community), install it and
+add it to the `PATH`. On Windows, Pype tries to find it in the standard installation destination or using `PATH`.
+
+To run **MongoDB** on a server, use your server distribution's tools to set it up (on Linux).
+
+### Git
+
+To be able to deploy Pype, **git** is needed. It will clone all required repositories and
+control versions, so future updates are easier. Git is, however, only required on the admin workstation for global studio updates.
+
+See [how to install git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git).
+
+To access private repositories, you'll need other optional stuff like ssh key agents, etc.
+
+### PowerShell (on Windows only)
+
+PowerShell is now included in recent versions of Windows. **Pype** requires at least
+version 5.0, included in Windows 10 from the beginning and available for Windows 7 SP1,
+Windows 8.1 and Windows Server 2012.
+
+If you want to know what version of PowerShell you are running, execute this in a PowerShell prompt:
+```powershell
+$PSVersionTable
+```
+If you need to install PowerShell or update it, please refer to:
+[Installing PowerShell on Windows](https://docs.microsoft.com/en-us/powershell/scripting/install/installing-powershell?view=powershell-6)
+
+### Xcode CLT (on Mac only)
+
+Pype needs the **Xcode Command Line Tools** installed to provide its tools and to be able to install its dependencies via Python's `pip` command. These will be downloaded
+and installed automatically if needed.
+
+### Other
+:::warning Linux headless server
+If you need to run Pype's **ftrack event server** on a headless Linux server, be aware that due to Qt dependencies you'll need to install OpenGL support there, even if the server doesn't have any real use for it.
+:::
+
+## Studio Setup
+
+### Pype location
+
+Before you install Pype, first clone the **pype-setup** repository to the place you want Pype to be. In a studio setting,
+you probably want that destination to be on a shared network drive so all users can access it.
+
+:::tip production and development branch
+We recommend maintaining two *versions* of Pype. The first is the **production** branch - the one your artists use every day for their work. The second one is the **development** version you should use for testing new features and as a development sandbox. The development branch can point to a different Avalon database and use its own **ftrack event server**. More on that in [Pype Configuration](admin_config)
+
+```text
+Shared Network Drive
+β”œβ”€β”€β”€ pype
+    β”œβ”€β”€β”€ prod
+    └─── dev
+```
+
+To prepare this structure, you can use:
+```sh
+cd /shared_drive/pype
+git clone --branch 2.4.0 https://bitbucket.com/pypeclub/pype-setup.git prod
+git clone --branch 2.4.0 https://bitbucket.com/pypeclub/pype-setup.git dev
+```
+:::
+
+Specify your version after the `--branch` option (it accepts both branch names and tags).
+
+:::note
+It is possible to distinguish `dev` and `prod` by changing the Pype icon color to orange. To do so, you have to create a `config.ini` file with this content:
+```text
+[DEFAULT]
+dev=true
+```
+
+And put the file to:
+```text
+Shared Network Drive
+β”œβ”€β”€β”€ dev
+    └─── pype-setup
+        └───pypeapp
+            └───config.ini
+```
+:::
+
+:::note
+You should always check out tags to get a specific release, otherwise you end up on the *develop* branch, which can be unstable.
+:::
+
+:::warning
+By default, both branches will use the same virtual environment. Be careful when modifying your requirements in the **dev** version because it will influence the **prod** version as well. To be safe, modify the `PYPE_ENV` environment variable before using **dev** Pype commands.
+:::
+
+### Installation
+
+
+
+
+
+To install Pype you first need to get to its root directory in PowerShell.
+
+If you have Pype on the network, you should mount it as a drive and assign it a consistent letter. You can also mount this network location via the junction point feature. As admin, run from shell:
+
+```sh
+mklink /d "C:\pipeline" "\\server\pipeline"
+```
+Then your network drive will be available transparently at `C:\pipeline`.
+
+```sh
+cd Z:\pype\production\pype-setup
+```
+-----
+
+Now you can run the installation itself:
+```sh
+.\pype.bat install
+```
+
+
+
+To install pype you first need to get to its root directory in a bash shell.
+If your Pype location is on a network drive, you should add it to `/etc/fstab` to
+mount it automatically during system startup.
+```sh
+cd /location/of/pype
+```
+
+**Pype** can be installed with the following command:
+
+```sh
+./pype install
+```
+
+On linux it is necessary to adjust user permissions on `/opt/pype/pype_env2` (or whatever you set in `PYPE_ENV`), and for that you need to be **root**.
+
+
+#### Mounting network drives
+If your Pype location is on a network drive, you need to mount it first. Here are the steps to do it and to make it re-mount automatically after your computer restarts:
+
+1) in Finder press **Command+K**
+2) enter the path `smb://server/pipeline` and hit **Connect**
+3) enter login and password
+4) the network drive is now mounted under `/Volumes/pipeline`
+5) now go to **System Preferences**
+6) click **Users & Groups -> Login Items**
+7) click + and select the mounted drive and click **Add**
+
+
+
+
+To install Pype on Mac, you need to have Administrator privileges. There are also further requirements if you need to deploy repositories.
+
+Run **Terminal** and execute the following commands:
+```sh
+sudo -s
+cd /Volumes/pipeline/pype/production/pype-setup
+./pype install
+```
+
+`sudo -s` will elevate your privileges, and you will need to enter your password.
`cd` will change directory to where you have pype located, and `pype install` will run the installation.
+
+If there are warnings about some directory not being owned by the current user, you can fix it with the following commands:
+
+```sh
+chown -R yourusername /usr/local/pype
+chown -R yourusername /Users/yourusername/Library/Caches/pip
+```
+Switch `yourusername` for your user name :)
+
+
+
+
+What it basically does is:
+1) Create a python virtual environment at `C:\Users\Public\pype_env2` on Windows, `/opt/pype/pype_env2` on Linux or `/usr/local/pype/pype_env2` on Mac.
+This can be overridden by setting `PYPE_ENV` to a different path. The logic behind this is that this directory on Windows can be shared
+between users on one machine - it only stores Pype dependencies, not any personal settings or credentials.
+
+2) Then it will install all python dependencies defined in `pypeapp\requirements.txt` into this virtual environment.
+
+The default installation will use your internet connection to download all necessary requirements.
+
+#### Offline installation
+
+You can also install Pype in offline scenarios:
+
+```sh
+pype install --offline
+```
+
+This will use dependencies downloaded into `pype-setup/vendor/packages` rather than pulling directly from the internet. Those packages must, however, first be
+downloaded on a machine connected to the internet using:
+
+```sh
+pype download
+```
+
+:::warning multiple platforms
+`pype download` will only download packages for the currently running platform. So if you run it on a Windows machine, only Windows packages get downloaded (along with many universal ones). If you then run `pype install --offline` on a Linux machine, it will probably fail as the Linux specific packages will be missing. In multiplatform environments we recommend running `pype download` on all used platforms to combine all necessary packages into `vendor/packages`.
+:::
+
+:::caution multiplatform caveat
+There can be problems with library compatibility when using multiplatform environments. For example, **PyQt 5.12** seems to cause no problems on Windows, but using it on **CentOS Linux 7** will cause problems because CentOS ships with some older dependent libraries that will not work with the aforementioned PyQt version.
+:::
+
+#### Forcing Installation
+
+Sometimes it is necessary to force re-installation of the Pype environment. To do this:
+
+```sh
+pype install --force
+```
+
+or
+
+```sh
+pype install --force --offline
+```
+in offline scenarios.
+
+This is useful as a first line of debugging if Pype is misbehaving. You can of course just manually delete the `PYPE_ENV` directory and run `pype install` again.
+
+### Deployment
+
+After Pype is cloned and installed, it is necessary to *deploy* all repositories used by Pype. This must be done on a computer with
+Internet access.
+
+```sh
+pype deploy
+```
+
+This command will process all repositories specified in `deploy/deploy.json` and clone them into the `repos/` directory.
+
+```sh
+pype deploy --force
+```
+
+will deploy repositories, overwriting existing ones if they exist and setting them to the state specified in *deploy.json*.
+
+:::note customizing deployment
+You can customize your deployment to some extent. Everything specified in `deploy/deploy.json` is considered the default and can be overridden by creating your own *deploy.json* in a subdirectory.
+
+```text
+pype
+ β”œβ”€β”€β”€ pypeapp
+ β”œβ”€β”€β”€ deploy
+ β”‚    β”œβ”€β”€β”€ deploy.json
+ β”‚    β”œβ”€β”€β”€ deploy_schema-1.0.json
+ β”‚    β”œβ”€β”€β”€ my_studio_override
+ β”‚    β”‚    β”œβ”€β”€β”€ deploy.json
+ β”‚    β”‚    └─── deploy_schema-1.0.json
+ β”‚   ...
+...
+```
+In such a configuration, `deploy/my_studio_override/deploy.json` will take precedence over the default one.
+:::
+
+To validate that the Pype deployment is ok, run:
+
+```sh
+pype validate
+```
+
+#### Structure of `deploy.json`
+
+There are a few features in `deploy.json` that need to be explained in further detail.
+
+Here is a list of keys used and their function:
+
+- `PYPE_CONFIG` - path to the Pype configuration repository.
+- `init_env` - these are environment files in the Pype configuration repository that
+  are loaded immediately after Pype starts. They define basic functionality.
+  ```js
+  "init_env": ["global", "avalon", "ftrack", "deadline"]
+  ```
+  For example, if you don't use *Deadline* but you need *Muster* support, change `deadline` to `muster`.
+  Pype will then load `{PYPE_CONFIG}/environments/muster.json` and set the environment variables there.
+- `repositories`: this is a list of repositories that will be deployed to `repos/`. There are a few options
+  for each repository:
+
+  - `name`: name of the repository; it will be used as the directory name
+  - `url`: url of the git repository
+  - `branch` or `tag`: this specifies either a branch - its *HEAD* will be checked out - or a
+    `tag` - the commit tagged with the specified tag will be checked out.
+
+- `pip`: these are additional dependencies to be installed by *pip* into the virtual environment.
+- `archive_files`: archive files to be unpacked somewhere. For example an ffmpeg installation or
+  anything else we need to extract to some place during deployment.
+
+  - `extract_path`: path to where this archive should be extracted
+  - `url` or `vendor`: this is the url of the source to be downloaded, or a name in `vendor/packages` to be used
+  - `md5_url`: optional url for an md5 file to validate the checksum of the downloaded file
+  - `skip_first_subfolder`: will move everything inside the first directory in the archive to `extract_path`.
+
+#### Offline Deployment
+
+In offline scenarios it is up to you to replicate what `pype deploy` does. The easiest way
+to go is to run `pype deploy` on a machine with internet access, get everything into `repos/` and move it to your studio install location:
+
+```sh
+cd pype-setup
+tar cvzf pype_repos.tgz repos/
+```
+
+Do the same for things deployed to *vendor*.
diff --git a/website/docs/pype2/admin_introduction.md b/website/docs/pype2/admin_introduction.md
new file mode 100644
index 0000000000..7a3b8f51d8
--- /dev/null
+++ b/website/docs/pype2/admin_introduction.md
@@ -0,0 +1,71 @@
+---
+id: admin_getting_started
+title: Getting Started
+sidebar_label: Getting Started
+---
+
+## Introduction
+
+**Pype** is part of a larger ecosystem of tools built around [avalon](https://github.com/getavalon/core) and [pyblish](https://github.com/pyblish/pyblish-base).
+To be able to use it, you need those tools and a correctly set-up environment. This
+requires additional software installed and configured correctly on your system.
+
+Fortunately this daunting task is handled for you by the **Pype Setup** package itself. **Pype** can
+install most of its requirements automatically, but a few more things are needed in
+various usage scenarios.
+
+## Software requirements
+
+- **Python 3.7+** (locally on all workstations)
+- **PowerShell 5.0+** (Windows only)
+- **Bash** (Linux only)
+- **MongoDB** (centrally accessible)
+
+There are other requirements for different advanced scenarios. For a more
+complete guide please refer to the [Pype Setup page](admin_install).
+
+
+## Hardware requirements
+
+Pype should be installed centrally on a fast network storage with at least read access rights for all workstations and users in the studio. A full deployment with all dependencies and both development and production branches installed takes about 1GB of data; however, to ensure smooth updates and general working comfort, we recommend allocating at least 4GB of storage dedicated to the Pype deployment.
+
+For a well-functioning ftrack event server, we recommend a linux virtual server with Ubuntu or CentOS. CPU and RAM allocation needs differ based on studio size, but 2GB of RAM, a dual core CPU and around 4GB of storage should suffice.
+
+## Central repositories
+
+### Pype-setup
+
+Pype-setup is the glue that binds Avalon, Pype and the studio together. It is essentially a wrapper application that manages requirements, installation and all the environments, and runs all of our standalone tools.
+
+It has two main interfaces: the `Pype` CLI command for all admin level tasks and the `Pype Tray` application for artists. Documentation for the `Pype` command can be found [here](admin_pype_commands).
+
+This is also the only repository that needs to be downloaded by hand before a full pype deployment can take place.
+
+### Pype
+
+Pype is our "Avalon config" in Avalon terms; it takes avalon-core and expands on its default features and capabilities. This is where the vast majority of the code that works with your data lives.
+
+Avalon gives us the ability to work with a certain host, say Maya, in a standardised manner, but Pype defines **how** we work with all the data. Avalon by default expects each studio to have their own avalon config, which is reasonable considering all studios have slightly different requirements and workflows. We abstracted a lot of this customisability out of the avalon config by allowing pype behaviour to be altered by a set of .json based configuration files and presets.
+
+Thanks to that, we are able to maintain one codebase for the vast majority of the features across all our clients' deployments while keeping the option to tailor the pipeline to each individual studio.
+
+### Avalon-core
+
+Avalon-core is the heart and soul of Pype. It provides the base functionality including GUIs (albeit expanded and modified by us), database connection and maintenance, standards for data structures and working with entities, and a lot of universal tools.
+
+Avalon is being very actively developed and maintained by a community of studios and TDs from around the world, with the Pype Club team being an active contributor as well.
+
+## Studio Specific Repositories
+
+### Pype-Config
+
+The pype-config repository needs to be prepared and maintained for each studio using pype and holds all of their specific requirements for pype. Those range from naming conventions and folder structures (in pype referred to as `project anatomy`), through colour management and data preferences, all the way to which individual validators they want to use and what they are validating against.
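+
+To give a flavour of what these presets look like, here is a minimal sketch of a publish preset override (the file path and the exact values are illustrative; the real keys are documented on the Presets pages):
+
+```json
+{
+    "ValidateModelName": {
+        "enabled": false
+    },
+    "ExtractReview": {
+        "profiles": [{
+            "outputs": {
+                "h264": {"ext": "mov"}
+            }
+        }]
+    }
+}
+```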
+
+Thanks to a very flexible and extensible system of presets, we're almost always able to accommodate client requests for modified behaviour by introducing new presets, rather than permanently altering the main codebase for everyone.
+
+
+### Studio-Project-Configs
+
+On top of the studio-wide pype config, we support project level overrides for any and all variables and presets available in the main studio config.
+
+### Studio-Project-Scripts
diff --git a/website/docs/pype2/admin_presets_ftrack.md b/website/docs/pype2/admin_presets_ftrack.md
new file mode 100644
index 0000000000..793812dc2c
--- /dev/null
+++ b/website/docs/pype2/admin_presets_ftrack.md
@@ -0,0 +1,119 @@
+---
+id: admin_presets_ftrack
+title: Presets > Ftrack
+sidebar_label: Ftrack
+---
+
+## PROJECT_DEFAULTS.json
+
+path: `pype-config/presets/ftrack/project_defaults.json`
+
+A list of all project defaults to be set when you run "Ftrack Prepare Project".
+
+```json
+{
+    "fps": 25,
+    "frameStart": 1001,
+    "frameEnd": 1100,
+    "clipIn": 1001,
+    "clipOut": 1100,
+    "handleStart": 10,
+    "handleEnd": 10,
+
+    "resolutionHeight": 1080,
+    "resolutionWidth": 1920,
+    "pixelAspect": 1.0,
+    "applications": [
+        "maya_2019", "nuke_11.3", "nukex_11.3", "nukestudio_11.3", "deadline"
+    ],
+    "tools_env": [],
+    "avalon_auto_sync": true
+}
+```
+
+## FTRACK_CONFIG.json
+
+path: `pype-config/presets/ftrack/ftrack_config.json`
+
+### `sync_to_avalon` [dict]
+
+List of statuses that allow moving, deleting and changing of names on ftrack entities. Once any child of an entity is set to a status different from those listed here, the entity is considered to have been worked on and no major changes to the hierarchy are allowed any more.
+
+`statuses_name_change` [list]:
+
+```json
+{
+    "sync_to_avalon": {
+        "statuses_name_change": ["not ready", "ready"]
+    }
+}
+```
+
+### `status_update` [dict]
+
+Mapping of statuses for automatic updates.
+The key specifies the resulting status and the value is a list of statuses from which we allow changing to the target status.
+
+`_ignore_` [list]: source statuses to ignore
+
+`target_status` [list]: source statuses allowed to change to the target status (the key)
+
+```json
+{
+    "status_update": {
+        "_ignore_": ["in progress", "ommited", "on hold"],
+        "Ready": ["not ready"],
+        "In Progress" : ["_any_"]
+    }
+}
+```
+
+### `status_version_to_task` [dict]
+
+Mapping of statuses that propagate automatically from a published version to its task. By default we search for an identical status; however, this preset lets you remap between different statuses on versions and tasks.
+
+`status_version_to_task` [dict]:
+
+```json
+{
+    "status_version_to_task": {
+        "__description__": "Status `from` (key) must be lowered!",
+        "in progress": "in progress",
+        "approved": "approved"
+    }
+}
+```
+
+## SERVER.json
+
+path: `pype-config/presets/ftrack/server.json`
+
+### `first_version_status` [dict]
+
+`task_status_map` [list]: list of dictionaries specifying individual mappings
+
+`status` [string]: status to set if `key` and `name` match.
+
+`name` [string]: name of the task or the task's type.
+
+`key` [enumerator]: _optional_, specifies where to look for the name. There are two possible values:
+  1. `task`: task's name (default)
+  2. `task_type`: task type's name
+
+It doesn't matter if values are lowered or capitalized.
+
+```json
+{
+    "FirstVersionStatus": {
+        "task_status_map": [{
+            "key": "task",
+            "name": "compositing",
+            "status": "Blocking"
+        }, {
+            "MORE ITEMS...": "MORE VALUES..."
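+        }, {
+            /* illustrative extra item (an assumption): matching by task type's name */
+            "key": "task_type",
+            "name": "compositing",
+            "status": "In Progress"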
+ }] + }, + "...": "{...}" +} +``` diff --git a/website/docs/pype2/admin_presets_maya.md b/website/docs/pype2/admin_presets_maya.md new file mode 100644 index 0000000000..52717873d2 --- /dev/null +++ b/website/docs/pype2/admin_presets_maya.md @@ -0,0 +1,143 @@ +--- +id: admin_presets_maya +title: Presets > Maya +sidebar_label: Maya +--- + +## CAPTURE.json + +path: `pype-config/presets/maya/capture.json` + +All the viewport settings for maya playblasts. + +### `Codec` [dict] ### + +```python + "Codec": { + "compression": "jpg", + "format": "image", + "quality": 95 + } +``` + + +### `Display Options` [dict] ### + +```python +"Display Options": { + "background": [ + 0.7137254901960784, + 0.7137254901960784, + 0.7137254901960784 + ], + "backgroundBottom": [ + 0.7137254901960784, + 0.7137254901960784, + 0.7137254901960784 + ], + "backgroundTop": [ + 0.7137254901960784, + 0.7137254901960784, + 0.7137254901960784 + ], + "override_display": true + } +``` + +### `Generic` [dict] ### +```python +"Generic": { + "isolate_view": true, + "off_screen": true +}, +``` + +### `IO` [dict] ### + +```python +"IO": { + "name": "", + "open_finished": false, + "raw_frame_numbers": false, + "recent_playblasts": [], + "save_file": false +}, +``` + +### `PanZoom` [dict] ### + +```python +"PanZoom": { + "pan_zoom": true +}, +``` + +### `Viewport Options` [dict] ### + +```python +"Viewport Options": { + "cameras": false, + "clipGhosts": false, + "controlVertices": false, + "deformers": false, + "dimensions": false, + "displayLights": 0, + "dynamicConstraints": false, + "dynamics": false, + "fluids": false, + "follicles": false, + "gpuCacheDisplayFilter": false, + "greasePencils": false, + "grid": false, + "hairSystems": false, + "handles": false, + "high_quality": true, + "hud": false, + "hulls": false, + "ikHandles": false, + "imagePlane": false, + "joints": false, + "lights": false, + "locators": false, + "manipulators": false, + "motionTrails": false, + "nCloths": false, + "nParticles": false, + "nRigids": false, + "nurbsCurves": false, + "nurbsSurfaces": false, + "override_viewport_options": true, + "particleInstancers": false, + "pivots": false, + "planes": false, + "pluginShapes": false, + "polymeshes": true, + "shadows": false, + "strokes": false, + "subdivSurfaces": false, + "textures": false, + "twoSidedLighting": true +} +``` + +## Maya instance scene types + +It is possible to set when to use `.ma` or `.mb` for: + +- camera +- setdress +- layout +- model +- rig +- yetiRig + +Just put `ext_mapping.json` into `presets/maya`. Inside is simple mapping: + +```JSON +{ + "rig": "mb", + "camera": "mb" +} +``` + +*Note that default type is `ma`* diff --git a/website/docs/pype2/admin_presets_nukestudio.md b/website/docs/pype2/admin_presets_nukestudio.md new file mode 100644 index 0000000000..256c6e5ef4 --- /dev/null +++ b/website/docs/pype2/admin_presets_nukestudio.md @@ -0,0 +1,58 @@ +--- +id: admin_presets_nukestudio +title: Presets > NukeStudio +sidebar_label: Nukestudio +--- + +## TAGS.json + +path: `pype-config/presets/nukestudio/tags.json` + +Each tag defines defaults in `.json` file. Inside of the file you can change the default values as shown in the example (`>>>"1001"<<<`). Please be careful not to alter the `family` value. 
+
+```python
+"Frame start": {
+    "editable": "1",
+    "note": "Starting frame for comps",
+    "icon": {
+        "path": "icons:TagBackground.png"
+    },
+    "metadata": {
+        "family": "frameStart",
+        "number": >>>"1001"<<<
+    }
+}
+```
+
+## PUBLISH.json
+
+path: `pype-config/presets/plugins/nukestudio/publish.json`
+
+### `CollectInstanceVersion` [dict] ###
+
+This plugin is set to `true` by default, so it will synchronize the version of published instances with the version of the workfile. Set `enabled` to `false` if you wish to let the publishing process decide on the next available version.
+
+```python
+{
+    "CollectInstanceVersion": {
+        "enabled": false
+    }
+}
+```
+
+### `ExtractReviewCutUpVideo` [dict] ###
+
+path: `pype-config/presets/plugins/nukestudio/publish.json`
+
+The plugin is responsible for cutting shorter or longer source material for review. Here you can add any additional tags you wish to be added into the extract review process.
+
+The plugin generates a re-edited intermediate video with handles, even if it has to add empty black frames. Some productions prefer to use review material without handles, so in the example the `no-handles` tag is added. This allows a further review extractor to publish the review without handles, without affecting other outputs.
+
+```python
+{
+    "ExtractReviewCutUpVideo": {
+        "tags_addition": ["no-handles"]
+    }
+}
+```
diff --git a/website/docs/pype2/admin_presets_plugins.md b/website/docs/pype2/admin_presets_plugins.md
new file mode 100644
index 0000000000..797995d2b7
--- /dev/null
+++ b/website/docs/pype2/admin_presets_plugins.md
@@ -0,0 +1,592 @@
+---
+id: admin_presets_plugins
+title: Presets > Plugins
+sidebar_label: Plugins
+---
+
+## Global
+
+### publish.json
+
+Each plugin in the json should be added under the name of its class. There are some default attributes recommended for use in case you wish a plugin to be switched off for some projects in `project overwrites`, like `enabled: false`. So for example, if you wish to switch off the plugin class `PluginName(pyblish.api.ContextPlugin)` in the file `name_of_plugin_file.py`, it can be done simply by adding the following text at the root level of the publish.json file:
+
+```json
+{
+    "PluginName": {
+        "enabled": false
+    }
+}
+```
+
+### `ExtractReview`
+
+The plugin responsible for automatic FFmpeg conversion to a variety of formats.
+
+Supported extensions for both input and output: `["exr", "jpg", "jpeg", "png", "dpx", "mov", "mp4"]`
+
+**ExtractReview** creates new representations based on presets and the representations in the instance. The preset should contain only one attribute, **"profiles"**, which is a list of profile items. Each profile item has **outputs**, where the definitions of possible outputs are, and may have specified filters for **hosts**, **tasks** and **families**.
+
+#### Profile filters
+As mentioned above, you can define multiple profiles for different contexts. The profile whose filters best match the current context is used. You can define a profile without filters and use it as the **default**. Only **one profile or none** is processed per instance.
+
+All context filters are lists which may contain strings or regular expressions (RegEx).
+- **hosts** - Host from which publishing was triggered. `["maya", "nuke"]`
+- **tasks** - Currently processed task. `["[Cc]ompositing", "[Aa]nimation"]`
+- **families** - Main family of the processed instance. `["plate", "model"]`
+
+:::important Filtering
+Filters are optional and may not be set.
In case multiple profiles match the current context, a profile with filters has higher priority than a profile without filters.
+:::
+
+#### Profile outputs
+A profile may have multiple outputs from one input, which is why **outputs** is a dictionary: the key represents the **filename suffix** (to avoid overwriting files with the same name) and the value represents the definition itself. A definition may contain multiple optional keys.
+
+| Key | Description | Type | Example |
+| --- | --- | --- | --- |
+| **width** | Width of output. | int | 1920 |
+| **height** | Height of output. | int | 1080 |
+| **letter_box** | Set letterbox ratio. | float | 2.35 |
+| **ext** | Extension of output file(s). | str | "mov" |
+| **tags** | Tags added to the new representation. | list | [here](#new-representation-tags-tags) |
+| **ffmpeg_args** | Additional FFmpeg arguments. | dict | [here](#ffmpeg-arguments-ffmpeg_args) |
+| **filter** | Filters definition. | dict | [here](#output-filters-filter) |
+
+:::note
+As mentioned above, **all keys are optional**. If they are not filled at all, then **"ext"** is filled with the input's file extension, and the resolution keys **"width"** and **"height"** are filled from instance data, or from the input resolution if the instance doesn't have them set.
+:::
+
+:::important resolution
+It is not possible to enter only **"width"** or only **"height"**. In that case the set values will be skipped.
+:::
+
+#### New representation tags (`tags`)
+You can add tags to the representations created during the extraction process. This might help to define what should happen with a representation in upcoming plugins.
+
+| Tag | Description |
+| --- | --- |
+| **burnin** | Add burnins with predefined values into the output. |
+| **preview** | Will be used as preview in Ftrack. |
+| **reformat** | Rescale to format based on width and height keys. |
+| **bake-lut** | Bake LUT into the output (if a path is available in the data). |
+| **slate-frame** | Add a slate frame at the beginning of the video. |
+| **no-handles** | Remove the shot handles from the output. |
+| **sequence** | Generate a sequence of images instead of a single frame.
Is applied only if **"ext"** of output is image extension e.g.: png or jpg/jpeg. | + +:::important Example +Tags key must contain list of strings. +```json +{ + "tags": ["burnin", "preview"] + ... +} +``` +::: + +#### FFmpeg arguments (`ffmpeg_args`) +It is possible to set additional FFmpeg arguments. Arguments are split into 4 categories **"input"**, **"video_filters"**, **"audio_filters"** and **"output"**. + +| Key | Description | Type | Example | +| --- | --- | --- | --- | +| **input** | FFmpeg arguments added before video/image input. | list | ["-gamma 2.2"] | +| **video_filters** | All values which should be in `-vf` or `-filter:v` argument. | list | ["scale=iw/2:-1"] | +| **audio_filters** | All values which should be in `-af` or `-filter:a` argument. | list | ["loudnorm"] | +| **output** | FFmpeg arguments added before output filepath. | list | ["-pix_fmt yuv420p", "-crf 18"] | + +:::important Example +For more information about FFmpeg arguments please visit [official documentation](https://ffmpeg.org/documentation.html). +```json +{ + "ffmpeg_args": { + "input": ["-gamma 2.2"], + "video_filters": ["yadif=0:0:0", "scale=iw/2:-1"], + "output": ["-pix_fmt yuv420p", "-crf 18"] + } + ... +} +``` +::: + +#### Output filters (`filter`) +Even if profile has filtering options it is possible that output definitions require to be filtered by all instance **families** or representation's **tags**. + +Families filters in output's `filter` will check all instance's families and may check for single family or combination of families. + +| Key | Description | Type | Example | +| --- | --- | --- | --- | +| **families** | At least one family item must match instance's families to process definition. | list | ["review"] | +| **tags** | At least one tag from list must be in representation's tags to process definition. | list | ["preview"] | + +:::important Example +These filters helps with explicit processing but do **NOT** use them if it's not necessary. +```json +{ + "filter": { + "families": [ + "review", + ["ftrack", "render2d"] + ], + "tags": ["preview"], + } + ... +} +``` +In this example representation's tags must contain **"preview"** tag and instance's families must contain **"review"** family, or both **"ftrack"** and **"render2d"** families. +::: + +#### Simple example +This example just create **mov** output with filename suffix **"simplemov"** for all representations with supported extensions. +```json +{ + "ExtractReview": { + "profiles": [{ + "outputs": { + /* Filename suffix "simplemov"*/ + "simplemov": { + /* Output extension will be "mov"*/ + "ext": "mov" + } + } + }] + } +} +``` + +#### More complex example +:::note +This is just usage example, without relevant data. Do **NOT** use these presets as default in production. +::: + +```json +{ + "ExtractReview": { + "profiles": [ + { + /* 1. profile - Without filters will be used as default. */ + "outputs": { + /* Extract single mov Prores 422 with burnins, slate and baked lut. */ + "prores": { + "ext": "mov", + "codec": [ + "-codec:v prores_ks", + "-profile:v 3" + ], + "tags": ["burnin", "reformat", "bake-lut", "slate-frame"] + } + } + }, { + /* 2. profile - Only for Nuke, "compositing" task and instance family "render2d". 
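+                   (hosts, tasks and families here follow the "Profile filters" rules described above)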
*/ + "hosts": ["nuke"], + "tasks": ["compositing"], + "families": ["render2d"], + "outputs": { + /* Extract preview mov with burnins and without handles.*/ + "h264": { + "ext": "mov", + "ffmpeg_args": { + "output": [ + "-pix_fmt yuv420p", + ] + }, + "tags": ["burnin", "preview", "no-handles"] + }, + /* Also extract mxf with slate */ + "edit": { + "ext": "mxf", + "ffmpeg_args": { + "output": [ + "-codec:v dnxhd", + "-profile:v dnxhr_444", + "-pix_fmt yuv444p10le", + "-b:v 185M", + "-ar 48000", + "-qmax 51" + ] + }, + "tags": ["slate-frame"] + } + } + }, { + /* 3. profile - Default profile for Nuke and Maya. */ + "hosts": ["maya", "nuke"], + "outputs": { + /* Extract preview mov with burnins and with forced resolution. */ + "h264": { + "width": 1920, + "height": 1080, + "ext": "mov", + "ffmpeg_args": { + "input": [ + "-gamma 2.2" + ], + "output": [ + "-pix_fmt yuv420p", + "-crf 18", + "-intra" + ] + }, + "tags": ["burnin", "preview"] + } + } + } + ] + } +} +``` + + +### `ExtractBurnin` + +Plugin is responsible for adding burnins into review representations. + +Burnins are text values painted on top of input and may be surrounded with box in 6 available positions `Top Left`, `Top Center`, `Top Right`, `Bottom Left`, `Bottom Center`, `Bottom Right`. + +![presets_plugins_extract_burnin](../assets/presets_plugins_extract_burnin_01.png) + +ExtractBurnin creates new representations based on plugin presets and representations in instance. Presets may contain 3 keys **options**, **profiles** and **fields**. + +#### Burnin settings (`options`) +Options is dictionary where you can set the global appearance of burnins. It is possible to not fill options at all, in that case default values are used. + +| Key | Description | Type | Example | Default | +| --- | --- | --- | --- | --- | +| **font_size** | Size of text. | float | 24 | 42 | +| **font_color** | Color of text. | str | [FFmpeg color documentation](https://ffmpeg.org/ffmpeg-utils.html#color-syntax) | "white" | +| **opacity** | Opacity of text. | float | 0.7 | 1 | +| **x_offset** | Horizontal margin around text and box. | int | 0 | 5 | +| **y_offset** | Vertical margin around text and box. | int | 0 | 5 | +| **bg_padding** | Padding for box around text. | int | 0 | 5 | +| **bg_color** | Color of box around text. | str | [FFmpeg color documentation](https://ffmpeg.org/ffmpeg-utils.html#color-syntax) | "black" | +| **bg_opacity** | Opacity of box around text. | float | 1 | 0.5 | + +#### Burnin profiles (`profiles`) +Plugin process is skipped if `profiles` are not set at all. Profiles contain list of profile items. Each profile item has **burnins**, where definitions of possible burnins are, and may have specified filters for **hosts**, **tasks** and **families**. Filters work the same way as described in [ExtractReview](#profile-filters). + +#### Profile burnins +Profile may have set multiple burnin outputs from one input and that's why **burnins** is dictionary where key represents **filename suffix** to avoid overriding files with same name and value represents burnin definition. Burnin definition may contain multiple optional keys. + +| Key | Description | Type | Example | +| --- | --- | --- | --- | +| **top_left** | Top left corner content. | str | "{dd}.{mm}.{yyyy}" | +| **top_centered** | Top center content. | str | "v{version:0>3}" | +| **top_right** | Top right corner content. | str | "Static text" | +| **bottom_left** | Bottom left corner content. | str | "{asset}" | +| **bottom_centered** | Bottom center content. 
| str | "{username}" |
+| **bottom_right** | Bottom right corner content. | str | "{frame_start}-{current_frame}-{frame_end}" |
+| **options** | Options overrides for this burnin definition. | dict | [Options](#burnin-settings-options) |
+| **filter** | Filters definition. | dict | [ExtractReview output filter](#output-filters-filter) |
+
+:::important Position keys
+Any position key `top_left` -> `bottom_right` is skipped if it is not set, contains an empty string, or is set to `null`.
+Position keys are not case sensitive, so `TOP_LEFT` or `Top_Left` can be used instead of `top_left`.
+:::
+
+:::note Filename suffix
+The filename suffix is appended to the filename suffix of the source representation.
+If the source representation has the suffix **"h264"** and the burnin suffix is **"client"**, then the final suffix is **"h264_client"**.
+:::
+
+**Available keys in burnin content**
+
+- It is possible to use the same keys as in [Anatomy](admin_config#available-keys).
+
+- It is allowed to use [Anatomy templates](admin_config#anatomy) themselves in burnins if they can be filled with the available data.
+
+- Additional keys in burnins:
+  | Burnin key | Description |
+  | --- | --- |
+  | frame_start | First frame number. |
+  | frame_end | Last frame number. |
+  | current_frame | Frame number for each frame. |
+  | duration | Total count of frames. |
+  | resolution_width | Resolution width. |
+  | resolution_height | Resolution height. |
+  | fps | Fps of an output. |
+  | timecode | Timecode by frame start and fps. |
+
+:::warning
+`timecode` is a specific key that can be used **only at the end of the content**. (`"BOTTOM_RIGHT": "TC: {timecode}"`)
+:::
+
+```json
+{
+    "profiles": [{
+        "burnins": {
+            "example": {
+                "TOP_LEFT": "{dd}.{mm}.{yyyy}",
+                /* Use anatomy template values. */
+                "TOP_CENTERED": "{anatomy[publish][path]}",
+                /* Python's formatting:
+                ":0>3" adds padding to the version number to have 3 digits. */
+                "TOP_RIGHT": "v{version:0>3}",
+                "BOTTOM_LEFT": "{frame_start}-{current_frame}-{frame_end}",
+                "BOTTOM_CENTERED": "{asset}",
+                "BOTTOM_RIGHT": "{username}"
+            }
+        }
+    }]
+    ...
+}
+```
+
+#### Default content values (`fields`)
+If you want to set position content values for all or most burnin definitions, you can set them in **"fields"**. They will be added to every burnin definition in all profiles. A value can be overridden if the same position key is filled in a burnin definition.
+
+```json
+{
+    "fields": {
+        "TOP_LEFT": "{yy}-{mm}-{dd}",
+        "TOP_CENTERED": "{username}",
+        "TOP_RIGHT": "v{version:0>3}"
+    },
+    "profiles": [{
+        "burnins": {
+            /* example1 has an empty definition but the top left, center and right values
+            will be filled. */
+            "example1": {},
+
+            /* example2 has 2 overrides. */
+            "example2": {
+                /* Top left value is overridden with the asset name. */
+                "TOP_LEFT": "{asset}",
+                /* Top center will be skipped. */
+                "TOP_CENTERED": null
+            }
+        }
+    }]
+}
+```
+
+#### Full presets example
+:::note
+This is just a usage example, without relevant data. Do **NOT** use these presets as defaults in production.
+::: + +```json +{ + "ExtractBurnin": { + "options": { + "opacity": 1, + "x_offset": 5, + "y_offset": 5, + "bg_padding": 5, + "bg_opacity": 0.5, + "font_size": 42 + }, + "fields": { + "TOP_LEFT": "{yy}-{mm}-{dd}", + "TOP_RIGHT": "v{version:0>3}" + }, + "profiles": [{ + "burnins": { + "burnin": { + "options": { + "opacity": 1 + }, + "TOP_LEFT": "{username}" + } + } + }, { + "families": ["animation", "pointcache", "model"], + "tasks": ["animation"], + "burnins": {} + }, { + "families": ["render"], + "tasks": ["compositing"], + "burnins": { + "burnin": { + "TOP_LEFT": "{yy}-{mm}-{dd}", + "TOP_RIGHT": "v{version:0>3}", + "BOTTOM_RIGHT": "{frame_start}-{current_frame}-{frame_end}", + "BOTTOM_LEFT": "{username}" + }, + "burnin_ftrack": { + "filter": { + "families": ["ftrack"] + }, + "BOTTOM_RIGHT": "{frame_start}-{current_frame}-{frame_end}", + "BOTTOM_LEFT": "{username}" + }, + "burnin_v2": { + "options": { + "opacity": 0.5 + }, + "TOP_LEFT": "{yy}-{mm}-{dd}", + "TOP_RIGHT": "v{version:0>3}" + } + } + }, { + "families": ["rendersetup"], + "burnins": { + "burnin": { + "TOP_LEFT": "{yy}-{mm}-{dd}", + "BOTTOM_LEFT": "{username}" + } + } + }, { + "tasks": ["animation"], + "burnins": { + "burnin": { + "TOP_RIGHT": "v{version:0>3}", + "BOTTOM_RIGHT": "{frame_start}-{current_frame}-{frame_end}" + } + } + }] + } +} +``` + +### `ProcessSubmittedJobOnFarm` + +```json +{ + "ProcessSubmittedJobOnFarm": { + "aov_filter": { + "host": ["aov_name"], + "maya": ["beauty"] + }, + "deadline_pool": "" + } +} +``` + +## Maya + +### load.json + +### `colors` + +maya outliner colours for various families + +```python +"colors": { + "model": [0.821, 0.518, 0.117], + "rig": [0.144, 0.443, 0.463], + "pointcache": [0.368, 0.821, 0.117], + "animation": [0.368, 0.821, 0.117], + "ass": [1.0, 0.332, 0.312], + "camera": [0.447, 0.312, 1.0], + "fbx": [1.0, 0.931, 0.312], + "mayaAscii": [0.312, 1.0, 0.747], + "setdress": [0.312, 1.0, 0.747], + "layout": [0.312, 1.0, 0.747], + "vdbcache": [0.312, 1.0, 0.428], + "vrayproxy": [0.258, 0.95, 0.541], + "yeticache": [0.2, 0.8, 0.3], + "yetiRig": [0, 0.8, 0.5] +} +``` + +### publish.json + +### `ValidateModelName` + +```python +"ValidateModelName": { + "enabled": false, + "material_file": "/path/to/shader_name_definition.txt", + "regex": "(.*)_(\\d)*_(?P.*)_(GEO)" +}, +``` + +### `ValidateShaderName` + +```python +"ValidateShaderName": { + "enabled": false, + "regex": "(?P.*)_(.*)_SHD" +} +``` + +## Nuke + +### create.json + +### `CreateWriteRender` + +```python +"CreateWriteRender": { + "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}" +} +``` + +### publish.json + +### `ExtractThumbnail` + +Plugin responsible for generating thumbnails with colorspace controlled by Nuke. Reformat node will secure proper framing within the default workfile screen space. + +```json +{ +"nodes": { + "Reformat": [ + ["type", "to format"], + ["format", "HD_1080"], + ["filter", "Lanczos6"], + ["black_outside", true], + ["pbb", false] + ] +} +} +``` + +### `ExtractReviewDataMov` + +`viewer_lut_raw` **true** will publish the baked mov file without any colorspace conversion. It will be baked with the workfile workspace. This can happen in case the Viewer input process uses baked screen space luts. + +#### baking with controlled colorspace + +Some productions might be using custom OCIO config files either for whole project, sequence or even individual shots. 
In that case we can use **display roles** to let compositors use their preferred viewer space, but also make sure baking of outputs is happening in a defined space for clients reviews. + + +`bake_colorspace_fallback` this will be used if for some reason no space defined in `shot_grade_rec709` is found on shot's _config.ocio_ + +> be aware this will only work if `viewer_lut_raw` is on _false_ + +```json +{ +"viewer_lut_raw": false, +"bake_colorspace_fallback": "show_lut_rec709", +"bake_colorspace_main": "shot_grade_rec709" +} +``` + +## NukeStudio + +### Publish.json + +Destination of the following example codes: + +[`presets/plugins/nukestudio/publish.json`](https://github.com/pypeclub/pype-config/blob/develop/presets/plugins/nukestudio/publish.json) + +### `CollectInstanceVersion` + +Activate this plugin if you want your published plates to always have the same version as the hiero project they were published from. If this plugin is off, plate versioning automatically finds the next available version in the database. + +```json +{ + "CollectInstanceVersion": { + "enabled": true + } +} +``` + +### `ExtractReviewCutUpVideo` + +Example of tag which could be added into the plugin preset. +In this case because we might have 4K plates but we would like to publish all review files reformated to 2K. + +[details of available tags](#preset-attributes) + +```json +{ + "ExtractReviewCutUpVideo": { + "tags_addition": ["reformat"] + } +} +``` + +## Standalone Publisher + +Documentation yet to come. diff --git a/website/docs/pype2/admin_presets_tools.md b/website/docs/pype2/admin_presets_tools.md new file mode 100644 index 0000000000..bcd992a1eb --- /dev/null +++ b/website/docs/pype2/admin_presets_tools.md @@ -0,0 +1,191 @@ +--- +id: admin_presets_tools +title: Presets > Tools +sidebar_label: Tools +--- + +## Colorspace + +We provide two examples of possible settings for nuke, but these can vary wildly between clients and projects. + +### `Default` [dict] + +path: `pype-config/presets/colorspace/default.json` + +```python +"nuke": { + "root": { + "colorManagement": "Nuke", + "OCIO_config": "nuke-default", + "defaultViewerLUT": "Nuke Root LUTs", + "monitorLut": "sRGB", + "int8Lut": "sRGB", + "int16Lut": "sRGB", + "logLut": "Cineon", + "floatLut": "linear" + }, + "viewer": { + "viewerProcess": "sRGB" + }, + "write": { + "render": { + "colorspace": "linear" + }, + "prerender": { + "colorspace": "linear" + }, + "still": { + "colorspace": "sRGB" + } + } +}, +``` + +### `aces103-cg` [dict] + + +path: `pype-config/presets/colorspace/aces103-cg.json` + +```python +"nuke": { + "root": { + "colorManagement": "OCIO", + "OCIO_config": "aces_1.0.3", + "workingSpaceLUT": "ACES - ACEScg", + "defaultViewerLUT": "OCIO LUTs", + "monitorLut": "ACES/sRGB D60 sim.", + "int8Lut": "Utility - sRGB - Texture", + "int16Lut": "Utility - sRGB - Texture", + "logLut": "Input - ARRI - V3 LogC (EI800) - Wide Gamut", + "floatLut": "ACES - ACES2065-1" + }, + "viewer": { + "viewerProcess": "sRGB D60 sim. (ACES)" + }, + "write": { + "render": { + "colorspace": "ACES - ACEScg" + }, + "prerender": { + "colorspace": "ACES - ACEScg" + }, + "still": { + "colorspace": "Utility - Curve - sRGB" + } + } +}, +``` + + +## Creator Defaults + +path: `pype-config/presets/tools/creator.json` + +This preset tells the creator tools what family should be pre-selected in different tasks. Keep in mind that the task is matched loosely so for example any task with 'model' in it's name will be considered a modelling task for these purposes. 
+
+`"Family name": ["list", "of", "tasks"]`
+
+```python
+"Model": ["model"],
+"Render Globals": ["light", "render"],
+"Layout": ["layout"],
+"Set Dress": ["setdress"],
+"Look": ["look"],
+"Rig": ["rigging"]
+```
+
+## Project Folder Structure
+
+path: `pype-config/presets/tools/project_folder_structure.json`
+
+Defines the base folder structure for a project. This is supposed to act as a starting point to quickly create the base of the project. You can add `[ftrack.entityType]` after any of the folders here and they will automatically also be created in the ftrack project.
+
+### `__project_root__` [dict]
+
+```python
+"__project_root__": {
+    "_prod" : {},
+    "_resources" : {
+        "footage": {
+            "ingest": {},
+            "offline": {}
+        },
+        "audio": {},
+        "art_dept": {},
+    },
+    "editorial" : {},
+    "assets[ftrack.Library]": {
+        "characters[ftrack]": {},
+        "locations[ftrack]": {}
+    },
+    "shots[ftrack.Sequence]": {
+        "editorial[ftrack.Folder]": {}
+    }
+}
+```
+
+## Software Folders
+
+path: `pype-config/presets/tools/sw_folders.json`
+
+Defines extra folders to be created inside the work space when a particular task type is launched. Mostly used by configs that use the {app} key in their work template and want to add hosts that are not supported yet.
+
+```python
+"compositing": ["nuke", "ae"],
+"modeling": ["maya", "app2"],
+"lookdev": ["substance"],
+"animation": [],
+"lighting": [],
+"rigging": []
+```
+
+## Tray Items
+
+path: `pype-config/presets/tray/menu_items.json`
+
+This preset lets admins turn different pype modules on and off from the tray menu, which in turn makes them unavailable across the pipeline.
+
+### `item_usage` [dict]
+
+```python
+"item_usage": {
+    "User settings": false,
+    "Ftrack": true,
+    "Muster": false,
+    "Avalon": true,
+    "Clockify": false,
+    "Standalone Publish": true,
+    "Logging": true,
+    "Idle Manager": true,
+    "Timers Manager": true,
+    "Rest Api": true
+},
+```
+
+## Muster Templates
+
+path: `pype-config/presets/muster/templates_mapping.json`
+
+Muster template mapping maps a Muster template ID to the name of a renderer. Initially it is set to the Muster defaults. For more about templates see the Muster documentation.
+
+Keys are renderer names and values are template IDs.
+
+```python
+"3delight": 41,
+"arnold": 46,
+"arnold_sf": 57,
+"gelato": 30,
+"harware": 3,
+"krakatoa": 51,
+"file_layers": 7,
+"mentalray": 2,
+"mentalray_sf": 6,
+"redshift": 55,
+"renderman": 29,
+"software": 1,
+"software_sf": 5,
+"turtle": 10,
+"vector": 4,
+"vray": 37,
+"ffmpeg": 48
+```
diff --git a/website/docs/pype2/admin_pype_commands.md b/website/docs/pype2/admin_pype_commands.md
new file mode 100644
index 0000000000..245e8a23e2
--- /dev/null
+++ b/website/docs/pype2/admin_pype_commands.md
@@ -0,0 +1,287 @@
+---
+id: admin_pype_commands
+title: Pype Commands Reference
+sidebar_label: Pype Commands
+---
+
+
+
+## Help
+
+To get all available commands:
+```sh
+pype --help
+```
+
+To get help on a particular command:
+```sh
+pype <command> --help
+```
+
+--------------------
+## `clean`
+
+Command to clean Python bytecode files from Pype and its environment. Useful
+for developers after a code or environment update.
+
+--------------------
+
+## `coverage`
+
+### `--pype`
+- without this option, tests are run on *pype-setup* only.
+
+Generate a code coverage report.
+
+```sh
+pype coverage --pype
+```
+
+--------------------
+
+## `deploy`
+
+To deploy Pype:
+```sh
+pype deploy
+```
+
+### `--force`
+
+To force re-deploy:
+```sh
+pype deploy --force
+```
+
+---------------------------
+
+## `download`
+
+To download required dependencies:
+```sh
+pype download
+```
+
+--------------------
+
+## `eventserver`
+
+This command launches the ftrack event server.
+
+This should ideally be run by a system service (such as systemd or upstart
+on linux, or a Windows service).
+
+You have to either set proper environment variables to provide the URL and
+credentials, or use the options to specify them. If you use `--store-credentials`,
+the provided credentials will be stored for later use.
+
+To run the ftrack event server:
+```sh
+pype eventserver --ftrack-url=<url> --ftrack-user=<user> --ftrack-api-key=<api key> --ftrack-events-path=<path> --no-stored-credentials --store-credentials
+```
+
+### `--debug`
+- print debug info
+
+### `--ftrack-url`
+- URL to the ftrack server
+
+### `--ftrack-user`
+- user name to log in to ftrack
+
+### `--ftrack-api-key`
+- ftrack api key
+
+### `--ftrack-events-path`
+- path to event server plugins
+
+### `--no-stored-credentials`
+- will use the credentials specified with the options above
+
+### `--store-credentials`
+- will store the credentials to a file for later use
+
+--------------------
+
+## `install`
+
+To install Pype:
+
+```sh
+pype install
+```
+
+### `--force`
+
+To reinstall Pype:
+```sh
+pype install --force
+```
+
+### `--offline`
+
+To install Pype in offline mode:
+```sh
+pype install --offline
+```
+
+To reinstall Pype in offline mode:
+```sh
+pype install --offline --force
+```
+
+--------------------
+
+## `launch`
+
+Launch an application in the Pype environment.
+
+### `--app`
+
+Application name - this should be the same as its [defining toml](admin_hosts#launchers) file (without .toml)
+
+### `--project`
+Project name
+
+### `--asset`
+Asset name
+
+### `--task`
+Task name
+
+### `--tools`
+*Optional: Additional tools environment files to add*
+
+### `--user`
+*Optional: User on whose behalf to run*
+
+### `--ftrack-server` / `-fs`
+*Optional: Ftrack server URL*
+
+### `--ftrack-user` / `-fu`
+*Optional: Ftrack user*
+
+### `--ftrack-key` / `-fk`
+*Optional: Ftrack API key*
+
+For example, to run a Python interactive console in the Pype context:
+```sh
+pype launch --app python --project my_project --asset my_asset --task my_task
+```
+
+--------------------
+
+## `make_docs`
+
+Generate API documentation into `docs/build`:
+```sh
+pype make_docs
+```
+
+--------------------
+
+## `mongodb`
+
+To run a testing mongodb database (requires MongoDB installed on the workstation):
+```sh
+pype mongodb
+```
+
+--------------------
+
+## `publish`
+
+Pype takes a JSON file from the provided path and uses it to publish the data in it.
+```sh
+pype publish <path to json>
+```
+
+### `--gui`
+- run Pyblish GUI
+
+### `--debug`
+- print more verbose information
+
+--------------------
+
+## `test`
+
+### `--pype`
+- without this option, tests are run on *pype-setup* only.
+
+Run the test suite on Pype:
+```sh
+pype test --pype
+```
+:::note Pytest
+For more information about testing see the [Pytest documentation](https://docs.pytest.org/en/latest/)
+:::
+
+--------------------
+
+## `texturecopy`
+
+Copy specified textures to the provided asset path.
+
+It validates that the project and asset exist. Then it will
+copy all textures found in all directories under `--path` to the destination
+folder, determined by the texture template in **anatomy**. It will use the source
+filename and automatically raise the version number on the directory.
+
+The result will be copied without the directory structure, so it will be flat.
+Nothing is written to the database.
+
+### `--project`
+
+### `--asset`
+
+### `--path`
+
+```sh
+pype texturecopy --project <project> --asset <asset> --path <path>
+```
+
+--------------------
+
+## `tray`
+
+To launch Tray:
+```sh
+pype tray
+```
+
+### `--debug`
+
+To launch Tray with debugging information:
+```sh
+pype tray --debug
+```
+
+--------------------
+
+## `update-requirements`
+
+Synchronize the dependencies in your virtual environment with the requirements.txt file.
+Equivalent of running `pip freeze > pypeapp/requirements.txt` from your virtual
+environment. This is useful for development purposes.
+
+```sh
+pype update-requirements
+```
+
+--------------------
+
+## `validate`
+
+To validate the deployment:
+```sh
+pype validate
+```
+
+--------------------
+
+## `validate-config`
+
+To validate JSON configuration files for syntax errors:
+```sh
+pype validate-config
+```
diff --git a/website/docs/pype2/admin_setup_troubleshooting.md b/website/docs/pype2/admin_setup_troubleshooting.md
new file mode 100644
index 0000000000..2ffdd9b5f3
--- /dev/null
+++ b/website/docs/pype2/admin_setup_troubleshooting.md
@@ -0,0 +1,17 @@
+---
+id: admin_setup_troubleshooting
+title: Setup Troubleshooting
+sidebar_label: Setup Troubleshooting
+---
+
+## SSL Server certificates
+
+Python is strict about certificates when connecting to a server with SSL. If
+a certificate cannot be validated, the connection will fail. Therefore, when using
+self-signed certificates, care must be taken to add their certificate authority
+to the trusted certificates.
+
+Also please note that even when using certificates from a trusted CA, you need to
+update your trusted CA certificate bundle, as those certificates can change.
+
+So if you receive an SSL error such as `cannot validate certificate`, please update the root CA certificate bundle on the machines, and possibly the **certifi** python package in the Pype virtual environment - just edit `pypeapp/requirements.txt` and update its version. You can find current versions on [PyPI](https://pypi.org).
diff --git a/website/docs/system_introduction.md b/website/docs/system_introduction.md
new file mode 100644
index 0000000000..71c5d64aa8
--- /dev/null
+++ b/website/docs/system_introduction.md
@@ -0,0 +1,71 @@
+---
+id: system_introduction
+title: Introduction
+sidebar_label: Introduction
+---
+
+
+**OpenPype** is a python application built on top of many other open-source libraries, modules and projects.
+To be able to use it, you need those tools and a correctly set-up environment. This
+requires additional software installed and configured correctly on your system.
+
+Fortunately this daunting task is mostly handled for you by the OpenPype build and install scripts. **OpenPype** can
+install most of its requirements automatically, but a few more things are needed in
+various usage scenarios.
+
+## Studio Preparation
+
+You can find a detailed breakdown of the technical requirements [here](dev_requirements), but in general OpenPype should be able
+to operate in most studios fairly quickly. The main obstacles are usually related to workflows and habits that
+might not be fully compatible with what OpenPype is expecting or enforcing.
+
+Keep in mind that if you run into any workflows that are not supported, it's usually just because we haven't hit
+that particular case, and it can most likely be added upon request.
+
+
+## Artist Workstations
+
+To use **OpenPype** in production, it should be installed on each artist workstation, whether that is in the studio or at home in
+the case of a distributed workflow. Once started, it lives in the system tray menu bar, and all of its tools are executed locally on
+the artist's computer. There are no special requirements for the artist workstations if you are running OpenPype from a frozen build.
+
+Each artist computer will need to be able to connect to your central mongo database to load and publish any work. They will also need
+access to your centralized project storage, unless you are running a fully distributed pipeline.
+
+## Centralized and Distributed?
+
+OpenPype supports a variety of studio setups, for example:
+
+- Single physical location with monolithic project storage.
+- Fully remote studios, utilizing artists' home workstations.
+- Distributed studios, running fully or partially on the cloud.
+- Hybrid setups with different storages per project.
+- And others that we probably didn't think of at all.
+
+It is totally up to you how you deploy and distribute OpenPype to your artists, but there are a few things to keep in mind:
+- While it is possible to store project files in different locations for different artists, it brings a lot of extra complexity
+to the table.
+- Some DCCs do not support using environment variables in file paths. This will make it very hard to maintain full multiplatform
+compatibility as well as variable storage roots.
+- Relying on a VPN connection and using it to work directly off network storage will be painfully slow.
+
+
+## Repositories
+
+### [OpenPype](https://github.com/pypeclub/pype)
+
+This is where the vast majority of the code that works with your data lives. It acts
+as the Avalon config, if we're speaking in avalon terms.
+
+Avalon gives us the ability to work with a certain host, say Maya, in a standardized manner, but OpenPype defines **how** we work with all the data, allows most of the behavior to be configured on a very granular level, and provides comprehensive build and installation tools for it.
+
+Thanks to that, we are able to maintain one codebase for the vast majority of the features across all our clients' deployments while keeping the option to tailor the pipeline to each individual studio.
+
+### [Avalon-core](https://github.com/pypeclub/avalon-core)
+
+Avalon-core is the heart of OpenPype. It provides the base functionality including key GUIs (albeit expanded and modified by us), database connection, standards for data structures, working with entities and some universal tools.
+
+Avalon is being actively developed and maintained by a community of studios and TDs from around the world, with the Pype Club team being an active contributor as well.
+
+Due to the extensive work we've done on OpenPype and the need to react quickly to production needs, we
+maintain our own fork of avalon-core, which is kept up to date with upstream changes as much as possible.
diff --git a/website/docs/upgrade_notes.md b/website/docs/upgrade_notes.md
new file mode 100644
index 0000000000..dbc90e948d
--- /dev/null
+++ b/website/docs/upgrade_notes.md
@@ -0,0 +1,165 @@
+---
+id: update_notes
+title: Update Notes
+sidebar_label: Update Notes
+---
+
+
+
+## **Updating to 2.13.0** ##
+
+### MongoDB
+
+**Must**
+
+Due to changes in how tasks are stored in the database (we added task types and the possibility of more arbitrary data), we must take a few precautions when updating.
+1.
Make sure that the ftrack event server with sync to avalon is NOT running during the update.
+2. Any project that is to be worked on with 2.13 must be synced from ftrack to avalon with the updated sync to avalon action, or using an updated event server sync to avalon event.
+
+If a 2.12 event server runs while you're trying to update the project sync with 2.13, it will override any changes.
+
+### Nuke Studio / hiero
+
+Make sure to re-generate the pype tags and replace any `task` tags on your shots with the new ones. This will allow you to make multiple tasks of the same type, but with different task names, at the same time.
+
+### Nuke
+
+Due to a minor update to the nuke write node, artists will be prompted to update their write nodes before being able to publish any old shots. There is a "repair" action for this in the publisher, so it doesn't have to be done manually.
+
+
+
+
+## **Updating to 2.12.0** ##
+
+### Apps and tools
+
+**Must**
+
+Run the Create/Update Custom Attributes action (to update the custom attributes group).
+Check if the studio has set custom intent values and move the values to `~/config/presets/global/intent.json`.
+
+**Optional**
+
+Set true/false on applications and tools by studio usage (this eliminates the app list in Ftrack and the time spent registering Ftrack actions).
+
+
+
+
+## **Updating to 2.11.0** ##
+
+### Maya in deadline
+
+We added our own maya deadline plugin to make render management easier. It operates the same as standard mayaBatch in deadline, but allows us to separate Pype-submitted jobs from the standard submitter. You'll need to follow this guide to update it: [install pype deadline](https://pype.club/docs/admin_hosts#pype-dealine-supplement-code)
+
+
+
+
+## **Updating to 2.9.0** ##
+
+### Review and Burnin PRESETS
+
+This release introduces a major update to working with review and burnin presets. They can now be much more granular and can target extremely specific usecases. The change is backwards compatible with the previous format of review and burnin presets; however, we highly recommend updating all the presets to the new format. Documentation on what this looks like can be found on the main pype [documentation page](https://pype.club/docs/admin_presets_plugins#publishjson).
+
+### Multiroot and storages
+
+With the support of multiroot projects, we removed the old `storage.json` from the configuration and replaced it with the simpler `config/anatomy/roots.json`. This is a required change, but it only needs to be done once per studio during the update to 2.9.0. [Read More](https://pype.club/docs/next/admin_config#roots)
+
+
+
+
+## **Updating to 2.7.0** ##
+
+### Master Versions
+To activate the `master` version workflow you need to enable the `IntegrateMasterVersion` plugin in `config/presets/plugins/global/publish.json`:
+
+```
+"IntegrateMasterVersion": {"enabled": true},
+```
+
+### Ftrack
+
+Make sure that the `intent` attribute in ftrack is set correctly. It should follow this setup unless you have your own custom values:
+```
+{
+    "label": "Intent",
+    "key": "intent",
+    "type": "enumerator",
+    "entity_type": "assetversion",
+    "group": "avalon",
+    "config": {
+        "multiselect": false,
+        "data": [
+            {"test": "Test"},
+            {"wip": "WIP"},
+            {"final": "Final"}
+        ]
+    }
+}
+```
+
+
+
+## **Updating to 2.6.0** ##
+
+### Dev vs Prod
+
+If you want to differentiate between dev and prod deployments of pype, you need to add a `config.ini` file to the `pype-setup/pypeapp` folder with this content:
+
+```
+[Default]
+dev=true
+```
+
+### Ftrack
+
+You will have to log in to ftrack in pype after the update.
+
+
+
+## **Updating to 2.7.0** ##
+
+### Master Versions
+To activate the `master` version workflow you need to activate the `IntegrateMasterVersion` plugin in `config/presets/plugins/global/publish.json`:
+
+```
+"IntegrateMasterVersion": {"enabled": true},
+```
+
+### Ftrack
+
+Make sure that the `intent` attribute in ftrack is set correctly. It should follow this setup, unless you have your own custom values:
+```
+{
+    "label": "Intent",
+    "key": "intent",
+    "type": "enumerator",
+    "entity_type": "assetversion",
+    "group": "avalon",
+    "config": {
+        "multiselect": false,
+        "data": [
+            {"test": "Test"},
+            {"wip": "WIP"},
+            {"final": "Final"}
+        ]
+    }
+}
+```
+
+
+
+
+## **Updating to 2.6.0** ##
+
+### Dev vs Prod
+
+If you want to differentiate between dev and prod deployments of pype, you need to add a `config.ini` file to the `pype-setup/pypeapp` folder with the following content:
+
+```
+[Default]
+dev=true
+```
+
+### Ftrack
+
+You will have to log in to ftrack in pype after the update.
+You should be automatically prompted with the ftrack login window when you launch the 2.6 release for the first time.
+
+The event server has to be restarted after the update to enable controlling it via actions.
+
+### Presets
+
+There is a major change in the way burnin presets are stored. We simplified the preset format; however, that means currently running production configs need to be tweaked to match the new format.
+
+:::note Example of converting a burnin preset from 2.5 to 2.6
+
+2.5 burnin preset
+
+```
+"burnins": {
+    "TOP_LEFT": {
+        "function": "text",
+        "text": "{dd}/{mm}/{yyyy}"
+    },
+    "TOP_CENTERED": {
+        "function": "text",
+        "text": ""
+    },
+    "TOP_RIGHT": {
+        "function": "text",
+        "text": "v{version:0>3}"
+    },
+    "BOTTOM_LEFT": {
+        "function": "text",
+        "text": "{frame_start}-{current_frame}-{frame_end}"
+    },
+    "BOTTOM_CENTERED": {
+        "function": "text",
+        "text": "{asset}"
+    },
+    "BOTTOM_RIGHT": {
+        "function": "frame_numbers",
+        "text": "{username}"
+    }
+}
+```
+
+2.6 burnin preset
+```
+"burnins": {
+    "TOP_LEFT": "{dd}/{mm}/{yyyy}",
+    "TOP_CENTERED": "",
+    "TOP_RIGHT": "v{version:0>3}",
+    "BOTTOM_LEFT": "{frame_start}-{current_frame}-{frame_end}",
+    "BOTTOM_CENTERED": "{asset}",
+    "BOTTOM_RIGHT": "{username}"
+}
+```
+
+:::
diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js
new file mode 100644
index 0000000000..3ce1cde060
--- /dev/null
+++ b/website/docusaurus.config.js
@@ -0,0 +1,128 @@
+module.exports = {
+  title: 'openPYPE',
+  tagline: 'Pipeline with support, for studios and remote teams.',
+  url: 'http://openpype.io/',
+  baseUrl: '/',
+  organizationName: 'Orbi Tools s.r.o',
+  projectName: 'openPype',
+  favicon: 'img/favicon/favicon.ico',
+  onBrokenLinks: 'ignore',
+  customFields: {
+  },
+  presets: [
+    [
+      '@docusaurus/preset-classic', {
+        docs: {
+          sidebarPath: require.resolve('./sidebars.js'),
+        },
+        theme: {
+          customCss: require.resolve('./src/css/custom.css')
+        }
+      }
+    ]
+  ],
+  themeConfig: {
+    colorMode: {
+      // "light" | "dark"
+      defaultMode: 'light',
+
+      // Hides the switch in the navbar.
+      // Useful if you want to support a single color mode.
+      disableSwitch: true
+    },
+    announcementBar: {
+      id: 'help_with_docs', // Any value that will identify this message.
+      content:
+        'This documentation is a work in progress; help us make it better. The current working version is 3.0.0-beta.',
+      backgroundColor: '#fff', // Defaults to `#fff`.
+      textColor: '#000', // Defaults to `#000`.
+ }, + navbar: { + style: 'dark', + title: 'openPYPE', + logo: { + src: 'img/logos/splash_main.svg' + }, + items: [ + { + to: '/features', + label: 'Features', + position: 'left' + }, { + to: 'docs/artist_getting_started', + label: 'User Docs', + position: 'left' + }, { + to: 'docs/system_introduction', + label: 'Admin Docs', + position: 'left' + }, + { + href: 'https://pype.club', + label: 'pypeclub', + position: 'right', + },{ + href: 'https://github.com/pypeclub', + label: 'Github', + position: 'right', + }, + ] + }, + footer: { + style: 'dark', + links: [ + { + title: 'Pages', + items: [ + { + label: 'Features', + to: 'features', + }, + { + label: 'Artist', + to: 'docs/artist_getting_started', + }, + { + label: 'Admin', + to: 'docs/admin_getting_started', + } + ] + }, + { + title: 'Community', + items: [ + { + label: 'Avalon Chat', + to: 'https://gitter.im/getavalon/Lobby', + }, + { + label: 'OpenPype Chat', + to: 'https://discord.gg/sFNPWXG', + }, + { + label: 'Github Discussions', + to: 'https://github.com/pypeclub/pype/discussions', + } + ], + }, + ], + copyright: 'Copyright Β© 2021 Orbi Tools', + }, + algolia: { + apiKey: '5e01ee3bfbb744ca6f25d4b281ce38a9', + indexName: 'openpype', + // Optional: see doc section bellow + contextualSearch: true, + // Optional: Algolia search parameters + searchParameters: {}, + }, + googleAnalytics: { + trackingID: 'G-HHJZ9VF0FG', + // Optional fields. + anonymizeIP: false, // Should IPs be anonymized? + }, + }, + stylesheets: [ + 'https://use.fontawesome.com/releases/v5.7.2/css/all.css' + ], +}; diff --git a/website/package.json b/website/package.json new file mode 100644 index 0000000000..7bd8b4e77b --- /dev/null +++ b/website/package.json @@ -0,0 +1,27 @@ +{ + "name": "pype-documentation", + "scripts": { + "examples": "docusaurus-examples", + "start": "docusaurus start", + "build": "docusaurus build", + "publish-gh-pages": "docusaurus-publish", + "write-translations": "docusaurus-write-translations", + "version": "docusaurus-version", + "rename-version": "docusaurus-rename-version", + "swizzle": "docusaurus swizzle", + "deploy": "docusaurus deploy", + "docusaurus": "docusaurus" + }, + "dependencies": { + "@docusaurus/core": "2.0.0-alpha.72", + "@docusaurus/preset-classic": "2.0.0-alpha.72", + "classnames": "^2.2.6", + "clsx": "^1.1.1", + "react": "^16.10.2", + "react-dom": "^16.10.2", + "react-popupbox": "^2.0.8", + "remarkable-admonitions": "^0.2.1", + "yarn": "^1.17.3" + }, + "devDependencies": {} +} diff --git a/website/publish.cmd b/website/publish.cmd new file mode 100644 index 0000000000..f203823bde --- /dev/null +++ b/website/publish.cmd @@ -0,0 +1,5 @@ +cd %~dp0 +set GIT_USER=mkolar +set CURRENT_BRANCH=develop +set USE_SSH=true +yarn deploy diff --git a/website/sidebars.js b/website/sidebars.js new file mode 100644 index 0000000000..4f5b7d604d --- /dev/null +++ b/website/sidebars.js @@ -0,0 +1,124 @@ +module.exports = { + artist: [ + { + type: "category", + collapsed: false, + label: "General", + items: [ + "artist_getting_started", + "artist_concepts", + "artist_publish", + "artist_tools", + ], + }, + { + type: "category", + collapsed: false, + label: "Integrations", + items: [ + "artist_hosts_hiero", + "artist_hosts_nuke", + "artist_hosts_maya", + "artist_hosts_blender", + "artist_hosts_harmony", + "artist_hosts_aftereffects", + "artist_hosts_photoshop", + "artist_hosts_unreal", + { + type: "category", + label: "Ftrack", + items: [ + "artist_ftrack", + "manager_ftrack", + "manager_ftrack_actions", + ], + } + ], + }, + ], + Admin: [ + 
"system_introduction", + { + type: "category", + label: "Getting Started", + items: [ + "dev_requirements", + "dev_build", + "admin_distribute", + "admin_use", + "dev_contribute", + "admin_openpype_commands", + ], + }, + { + type: "category", + label: "Configuration", + items: [ + "admin_settings", + "admin_settings_system", + "admin_settings_project_anatomy", + { + type: "category", + label: "Project Settings", + items: [ + "project_settings/settings_project_global" + ], + }, + ], + }, + { + type: "category", + label: "Modules", + items: [ + "module_ftrack", + "module_site_sync", + "module_deadline", + "module_muster", + "module_clockify" + ], + }, + { + type: "category", + label: "Integrations", + items: [ + "admin_hosts_blender" + ], + }, + { + type: "category", + label: "Releases", + items: ["changelog", "update_notes"], + }, + { + type: "category", + collapsed: false, + label: "2.0 legacy docs", + items: [ + { + type: "category", + label: "Deployment", + items: [ + "pype2/admin_getting_started", + "pype2/admin_install", + "pype2/admin_config", + "pype2/admin_ftrack", + "pype2/admin_hosts", + "pype2/admin_pype_commands", + "pype2/admin_setup_troubleshooting", + ], + }, + { + type: "category", + label: "Configuration", + items: [ + "pype2/admin_presets_nukestudio", + "pype2/admin_presets_ftrack", + "pype2/admin_presets_maya", + "pype2/admin_presets_plugins", + "pype2/admin_presets_tools", + ], + }, + ], + }, + ], +}; diff --git a/website/src/components/BadgesSection/badges.js b/website/src/components/BadgesSection/badges.js new file mode 100644 index 0000000000..4bc85df2ef --- /dev/null +++ b/website/src/components/BadgesSection/badges.js @@ -0,0 +1,59 @@ +export default { + upper: [ + { + title: "License", + src: + "https://img.shields.io/github/license/pypeclub/pype?labelColor=303846", + href: "https://github.com/pypeclub/pype", + }, + { + title: "Release", + src: + "https://img.shields.io/github/v/release/pypeclub/pype?labelColor=303846", + href: "https://github.com/pypeclub/pype", + }, + { + title: "Requirements State", + src: + "https://img.shields.io/requires/github/pypeclub/pype?labelColor=303846", + href: + "https://requires.io/github/pypeclub/pype/requirements/?branch=main", + }, + { + title: "VFX Platform", + src: + "https://img.shields.io/badge/vfx%20platform-2021-lightgrey?labelColor=303846", + href: "https://vfxplatform.com", + }, + { + title: "GitHub last commit", + src: + "https://img.shields.io/github/last-commit/pypeclub/pype/develop?labelColor=303846", + href: "https://github.com/pypeclub/pype", + }, + { + title: "GitHub commit activity", + src: + "https://img.shields.io/github/commit-activity/y/pypeclub/pype?labelColor=303846", + href: "https://github.com/pypeclub/pype", + }, + { + title: "Repository Size", + src: + "https://img.shields.io/github/repo-size/pypeclub/pype?labelColor=303846", + href: "https://github.com/pypeclub/pype", + }, + { + title: "Forks", + src: + "https://img.shields.io/github/forks/pypeclub/pype?style=social&labelColor=303846", + href: "https://github.com/pypeclub/pype", + }, + { + title: "Discord", + src: + "https://img.shields.io/discord/517362899170230292?label=discord&logo=discord&logoColor=white&labelColor=303846", + href: "https://discord.gg/sFNPWXG", + }, + ], +}; diff --git a/website/src/components/BadgesSection/index.js b/website/src/components/BadgesSection/index.js new file mode 100644 index 0000000000..f782de2a52 --- /dev/null +++ b/website/src/components/BadgesSection/index.js @@ -0,0 +1,25 @@ +import React from 'react'; + +import 
+import badges from './badges';
+import styles from './styles.module.css';
+import {StarButton} from "../index";
+
+const Badge = props => (
+  <a href={props.href}>
+    <img src={props.src} alt={props.title}/>
+  </a>
+);
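+
+// BadgesSection renders every entry from badges.js as a linked badge
+// image, laid out by the flex rules in styles.module.css.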
+export default function BadgesSection() {
+  const {upper: upperBadges} = badges;
+
+  return (
+    <section className={styles.badgesSection}>
+      <div className={styles.upperBadges}>
+        {upperBadges.map((badge, index) => (
+          <Badge key={index} {...badge} />
+        ))}
+      </div>
+    </section>
+  );
+};
diff --git a/website/src/components/BadgesSection/styles.module.css b/website/src/components/BadgesSection/styles.module.css
new file mode 100644
index 0000000000..4db99e7e06
--- /dev/null
+++ b/website/src/components/BadgesSection/styles.module.css
@@ -0,0 +1,17 @@
+.badgesSection {
+  margin-bottom: 5em;
+}
+.upperBadges,
+.lowerBadges {
+  display: flex;
+  flex-wrap: wrap;
+  justify-content: center;
+}
+.upperBadges {
+  margin-bottom: 17px;
+}
+.upperBadges a,
+.lowerBadges a {
+  margin-left: 2px;
+  margin-right: 2px;
+}
diff --git a/website/src/components/GithubButton/index.js b/website/src/components/GithubButton/index.js
new file mode 100644
index 0000000000..b1950a8d6f
--- /dev/null
+++ b/website/src/components/GithubButton/index.js
@@ -0,0 +1,30 @@
+import React from 'react';
+import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
+
+export function StarButton() {
+  const context = useDocusaurusContext();
+  const {siteConfig = {}} = context;
+
+  // Star button linking to the project repository on GitHub.
+  return <a href="https://github.com/pypeclub/pype">
+    T-Regx
+  </a>;
+}
+
+export function SponsorButton() {
+  return
+