diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 9fcb69e2e9..dba39ac36d 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -35,6 +35,47 @@ body:
label: Version
description: What version are you running? Look to OpenPype Tray
options:
+ - 3.17.3-nightly.1
+ - 3.17.2
+ - 3.17.2-nightly.4
+ - 3.17.2-nightly.3
+ - 3.17.2-nightly.2
+ - 3.17.2-nightly.1
+ - 3.17.1
+ - 3.17.1-nightly.3
+ - 3.17.1-nightly.2
+ - 3.17.1-nightly.1
+ - 3.17.0
+ - 3.16.7
+ - 3.16.7-nightly.2
+ - 3.16.7-nightly.1
+ - 3.16.6
+ - 3.16.6-nightly.1
+ - 3.16.5
+ - 3.16.5-nightly.5
+ - 3.16.5-nightly.4
+ - 3.16.5-nightly.3
+ - 3.16.5-nightly.2
+ - 3.16.5-nightly.1
+ - 3.16.4
+ - 3.16.4-nightly.3
+ - 3.16.4-nightly.2
+ - 3.16.4-nightly.1
+ - 3.16.3
+ - 3.16.3-nightly.5
+ - 3.16.3-nightly.4
+ - 3.16.3-nightly.3
+ - 3.16.3-nightly.2
+ - 3.16.3-nightly.1
+ - 3.16.2
+ - 3.16.2-nightly.2
+ - 3.16.2-nightly.1
+ - 3.16.1
+ - 3.16.0
+ - 3.16.0-nightly.2
+ - 3.16.0-nightly.1
+ - 3.15.12
+ - 3.15.12-nightly.4
- 3.15.12-nightly.3
- 3.15.12-nightly.2
- 3.15.12-nightly.1
@@ -94,47 +135,6 @@ body:
- 3.14.11-nightly.3
- 3.14.11-nightly.2
- 3.14.11-nightly.1
- - 3.14.10
- - 3.14.10-nightly.9
- - 3.14.10-nightly.8
- - 3.14.10-nightly.7
- - 3.14.10-nightly.6
- - 3.14.10-nightly.5
- - 3.14.10-nightly.4
- - 3.14.10-nightly.3
- - 3.14.10-nightly.2
- - 3.14.10-nightly.1
- - 3.14.9
- - 3.14.9-nightly.5
- - 3.14.9-nightly.4
- - 3.14.9-nightly.3
- - 3.14.9-nightly.2
- - 3.14.9-nightly.1
- - 3.14.8
- - 3.14.8-nightly.4
- - 3.14.8-nightly.3
- - 3.14.8-nightly.2
- - 3.14.8-nightly.1
- - 3.14.7
- - 3.14.7-nightly.8
- - 3.14.7-nightly.7
- - 3.14.7-nightly.6
- - 3.14.7-nightly.5
- - 3.14.7-nightly.4
- - 3.14.7-nightly.3
- - 3.14.7-nightly.2
- - 3.14.7-nightly.1
- - 3.14.6
- - 3.14.6-nightly.3
- - 3.14.6-nightly.2
- - 3.14.6-nightly.1
- - 3.14.5
- - 3.14.5-nightly.3
- - 3.14.5-nightly.2
- - 3.14.5-nightly.1
- - 3.14.4
- - 3.14.4-nightly.4
- - 3.14.4-nightly.3
validations:
required: true
- type: dropdown
diff --git a/.github/workflows/miletone_release_trigger.yml b/.github/workflows/miletone_release_trigger.yml
index 4a031be7f9..d755f7eb9f 100644
--- a/.github/workflows/miletone_release_trigger.yml
+++ b/.github/workflows/miletone_release_trigger.yml
@@ -5,12 +5,6 @@ on:
inputs:
milestone:
required: true
- release-type:
- type: choice
- description: What release should be created
- options:
- - release
- - pre-release
milestone:
types: closed
diff --git a/.gitignore b/.gitignore
index 50f52f65a3..622d55fb88 100644
--- a/.gitignore
+++ b/.gitignore
@@ -37,6 +37,7 @@ Temporary Items
###########
/build
/dist/
+/server_addon/packages/*
/vendor/bin/*
/vendor/python/*
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 095e0d96e4..7d5cf2c4d2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,4947 @@
# Changelog
+## [3.17.2](https://github.com/ynput/OpenPype/tree/3.17.2)
+
+
+[Full Changelog](https://github.com/ynput/OpenPype/compare/3.17.1...3.17.2)
+
+### **🆕 New features**
+
+
+
+Maya: Add MayaPy application. #5705
+
+This adds MayaPy as an application that can be launched from a task.
+
+
+___
+
+
+
+
+
+Feature: Copy resources when downloading last workfile #4944
+
+When the last published workfile is downloaded as a prelaunch hook, all resource files referenced in the workfile representation are copied to the `resources` folder, which is inside the local workfile folder.
+
+
+___
+
+
+
+
+
+Blender: Deadline support #5438
+
+Add Deadline support for Blender.
+
+
+___
+
+
+
+
+
+Fusion: implement toggle to use Deadline plugin FusionCmd #5678
+
+Fusion 17 doesn't work in Deadline 10.3, but FusionCmd does, and it is probably the better option as a headless variant. The Fusion plugin seems to close and reopen the application when the worker runs on an artist machine; FusionCmd does not. Added configuration to Project Settings so an admin can select the appropriate Deadline plugin.
+
+
+___
+
+
+
+
+
+Loader tool: Refactor loader tool (for AYON) #5729
+
+Refactored loader tool to new tool. Separated backend and frontend logic. Refactored logic is AYON-centric and is used only in AYON mode, so it does not affect OpenPype. The tool is also replacing library loader.
+
+
+___
+
+
+
+### **🚀 Enhancements**
+
+
+
+Maya: implement matchmove publishing #5445
+
+Adds the possibility to export multiple cameras in a single `matchmove` family instance, in both `abc` and `ma` formats. Exposed a 'Keep image planes' flag to control the export of image planes.
+
+
+___
+
+
+
+
+
+Maya: Add optional Fbx extractors in Rig and Animation family #5589
+
+This PR allows users to optionally export control rigs (optionally with mesh) and animated rigs as FBX by attaching the rig objects to the two newly introduced sets.
+
+
+___
+
+
+
+
+
+Maya: Optional Resolution Validator for Render #5693
+
+Adds an optional resolution validator for the Maya render family, similar to the one in Max. It checks whether the resolution in the render settings matches the resolution configured in the database.
+
+
+___
+
+
+
+
+
+Use host's node uniqueness for instance id in new publisher #5490
+
+Instead of writing `instance_id` as a parm or attribute on publish instances we can, for some hosts, rely on a unique name or path within the scene to refer to that particular instance. By doing so we fix #4820, because when such a publish instance is duplicated using the host's (DCC) functionality, the duplicate's uniqueness is already ensured; previously the attributes kept the exact values they were duplicated from, making `instance_id` a non-unique value.
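+
+As a conceptual illustration (plain Python, not the actual OpenPype code; the helper name is hypothetical), deriving the id from the node's unique scene path means a duplicate can never share the id of its source:
+
+```python
+# Hypothetical sketch: a scene path is unique per node, so it can act
+# as the instance id directly. A duplicated node gets a new path from
+# the host and therefore a new id - there is no stored attribute that
+# could be copied along.
+def instance_id_for(node_path):
+    return node_path
+
+print(instance_id_for("/obj/geo1/pointcache_MAIN"))  # original
+print(instance_id_for("/obj/geo2/pointcache_MAIN"))  # duplicate, new id
+```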
+
+
+___
+
+
+
+
+
+Max: Implementation of OCIO configuration #5499
+
+Resolves #5473. Implements OCIO configuration for Max 2024, following the Max 2024 update.
+
+
+___
+
+
+
+
+
+Nuke: Multiple format supports for ExtractReviewDataMov #5623
+
+This PR fixes the bug of the `ExtractReviewDataMov` plugin not supporting extensions other than `mov`. The plugin is also renamed to `ExtractReviewDataBakingStreams` as it provides multiple format support.
+
+
+___
+
+
+
+
+
+Bugfix: Houdini switching context doesn't update variables #5651
+
+Allows admins to have a list of vars (e.g. JOB) with (dynamic) values that will be updated on context changes, e.g. when switching to another asset or task. Using template keys is supported, but formatting key capitalization variants is not, e.g. {Asset} and {ASSET} won't work. Disabling the "Update Houdini vars on context change" feature will leave all Houdini vars unmanaged, and thus no context update changes will occur. Also, this PR adds a new button in the menu to update vars on demand.
+
+
+___
+
+
+
+
+
+Publisher: Fix report maker memory leak + optimize lookups using set #5667
+
+Fixes a memory leak where resetting the publisher did not clear the stored plugins for the Publish Report Maker. Also changes the stored plugins to a `set` to optimize lookup speeds.
+
+
+___
+
+
+
+
+
+Add openpype_mongo command flag for testing. #5676
+
+Instead of changing the environment, this command flag allows for changing the database.
+
+
+___
+
+
+
+
+
+Nuke: minor docstring and code tweaks for ExtractReviewMov #5695
+
+Code and docstring tweaks on https://github.com/ynput/OpenPype/pull/5623
+
+
+___
+
+
+
+
+
+AYON: Small settings fixes #5699
+
+Small changes/fixes related to AYON settings. All Foundry apps variant `13-0` now has label `13.0`. Key `"ExtractReviewIntermediates"` is not mandatory in settings.
+
+
+___
+
+
+
+
+
+Blender: Alembic Animation loader #5711
+
+Implemented loading Alembic Animations in Blender.
+
+
+___
+
+
+
+### **🐛 Bug fixes**
+
+
+
+Maya: Missing "data" field and enabling of audio #5618
+
+When updating audio containers, the field "data" was missing and the audio node was not enabled on the timeline.
+
+
+___
+
+
+
+
+
+Maya: Bug in validate Plug-in Path Attribute #5687
+
+Overwriting a list with a string caused `TypeError: string indices must be integers` in subsequent iterations, crashing the validator plugin.
+
+
+___
+
+
+
+
+
+General: Avoid fallback if value is 0 for handle start/end #5652
+
+There was a bug in `pyblish_functions.get_time_data_from_instance_or_context` where, if `handleStart` or `handleEnd` on the instance was set to 0, it fell back to grabbing the handles from the instance context. Instead, the logic should only fall back to `instance.context` if the key doesn't exist. This change only affected `handleStart`/`handleEnd`, and it's unlikely to cause issues on `frameStart`, `frameEnd` or `fps`, but regardless, the `get` logic was wrong.
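+
+A minimal sketch of the corrected fallback, with hypothetical helper names:
+
+```python
+# Fall back to the context only when the key is absent on the
+# instance; a value of 0 is valid and must be kept.
+def get_time_value(instance_data, context_data, key):
+    if key in instance_data:
+        return instance_data[key]
+    return context_data[key]
+
+instance_data = {"handleStart": 0}
+context_data = {"handleStart": 8, "handleEnd": 8}
+print(get_time_value(instance_data, context_data, "handleStart"))  # 0, not 8
+print(get_time_value(instance_data, context_data, "handleEnd"))    # 8, key missing
+```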
+
+
+___
+
+
+
+
+
+Fusion: added missing env vars to Deadline submission #5659
+
+Environment variables discerning the type of job were missing. Without them, the injection of environment variables won't start.
+
+
+___
+
+
+
+
+
+Nuke: workfile version synchronization settings fixed #5662
+
+Settings for synchronizing workfile version to published products are fixed.
+
+
+___
+
+
+
+
+
+AYON Workfiles Tool: Open workfile changes context #5671
+
+Change context when workfile is opened.
+
+
+___
+
+
+
+
+
+Blender: Fix remove/update in new layout instance #5679
+
+Fixes an error that occurs when removing or updating an asset in a new layout instance.
+
+
+___
+
+
+
+
+
+AYON Launcher tool: Fix refresh btn #5685
+
+The refresh button now propagates refreshed content properly. Folders and tasks are cached for 60 seconds instead of 10 seconds. Auto-refresh in the launcher refreshes only actions and related data, which is the project and project settings.
+
+
+___
+
+
+
+
+
+Deadline: handle all valid paths in RenderExecutable #5694
+
+This commit enhances the path resolution mechanism in the RenderExecutable function of the Ayon plugin. Previously, the function only considered paths starting with a tilde (~), ignoring other valid paths listed in exe_list. This limitation led to an empty expanded_paths list when none of the paths in exe_list started with a tilde, causing the function to fail to find the Ayon executable. With this fix, the RenderExecutable function now correctly processes and includes all valid paths from exe_list, improving its reliability and preventing unnecessary errors related to locating the Ayon executable.
+
+
+___
+
+
+
+
+
+AYON Launcher tool: Fix skip last workfile boolean #5700
+
+Skip last workfile boolean works as expected.
+
+
+___
+
+
+
+
+
+Chore: Explore here action can work without task #5703
+
+The Explore here action no longer crashes when no task is selected, and the error message was changed slightly.
+
+
+___
+
+
+
+
+
+Testing: Inject mongo_url argument earlier #5706
+
+Fix for https://github.com/ynput/OpenPype/pull/5676. The Mongo URL is now used earlier in the execution.
+
+
+___
+
+
+
+
+
+Blender: Add support to auto-install PySide2 in blender 4 #5723
+
+Change version regex to support blender 4 subfolder.
+
+
+___
+
+
+
+
+
+Fix: Hardcoded main site and wrongly copied workfile #5733
+
+Fixing these two issues:
+- Hardcoded main site -> Replaced by `anatomy.fill_root`.
+- Workfiles can sometimes be copied while they shouldn't.
+
+
+___
+
+
+
+
+
+Bugfix: ServerDeleteOperation asset -> folder conversion typo #5735
+
+Fix ServerDeleteOperation asset -> folder conversion typo
+
+
+___
+
+
+
+
+
+Nuke: loaders are filtering correctly #5739
+
+The variable name for filtering by extensions was not correct; it is supposed to be plural. It is fixed now and filtering works as it is supposed to.
+
+
+___
+
+
+
+
+
+Nuke: failing multiple thumbnails integration #5741
+
+This handles the situation when `ExtractReviewIntermediates` (previously `ExtractReviewDataMov`) has multiple outputs, including thumbnails that need to be integrated. Previously, integrating the thumbnail representation caused an issue in the integration process. This is now resolved by no longer integrating thumbnails as loadable representations. The new default is that thumbnail representations are NOT integrated (e.g. they will not show up in the DB and couldn't be loaded in the Loader) and no `_thumb.jpg` will be left in the (most likely) `render` publish folder. If there is a need to override this behavior, use `project_settings/global/publish/PreIntegrateThumbnails`.
+
+
+___
+
+
+
+
+
+AYON Settings: Fix global overrides #5745
+
+The `output` dictionary that gets passed into `ayon_settings._convert_global_project_settings` gets replaced when converting the settings for `ExtractOIIOTranscode`. This results in `global` not being in the output dictionary and thus the defaults being used and not the project overrides.
+
+
+___
+
+
+
+
+
+Chore: AYON query functions arguments #5752
+
+Fixed how `archived` argument is handled in get subsets/assets function.
+
+
+___
+
+
+
+### **🔀 Refactored code**
+
+
+
+Publisher: Refactor Report Maker plugin data storage to be a dict by plugin.id #5668
+
+Refactor Report Maker plugin data storage to be a dict by `plugin.id`. Also fixes the `_current_plugin_data` type on `__init__`.
+
+
+___
+
+
+
+
+
+Chore: Refactor Resolve into new style HostBase, IWorkfileHost, ILoadHost #5701
+
+Refactor Resolve into new style HostBase, IWorkfileHost, ILoadHost
+
+
+___
+
+
+
+### **Merged pull requests**
+
+
+
+Chore: Maya reduce get project settings calls #5669
+
+Re-use system settings / project settings where we can instead of requerying.
+
+
+___
+
+
+
+
+
+Extended error message when getting subset name #5649
+
+Each Creator uses the `get_subset_name` function, which collects context data and fills the configured template with placeholders. If any key is missing in the template, a non-descriptive error is thrown. This should provide a more verbose message.
+
+
+___
+
+
+
+
+
+Tests: Remove checks for env var #5696
+
+The env var will be filled in the `env_var` fixture; here it is too early to check it.
+
+
+___
+
+
+
+
+
+
+## [3.17.1](https://github.com/ynput/OpenPype/tree/3.17.1)
+
+
+[Full Changelog](https://github.com/ynput/OpenPype/compare/3.17.0...3.17.1)
+
+### **🆕 New features**
+
+
+
+Unreal: Yeti support #5643
+
+Implemented Yeti support for Unreal.
+
+
+___
+
+
+
+
+
+Houdini: Add Static Mesh product-type (family) #5481
+
+This PR adds support for publishing Unreal Static Meshes from Houdini as FBX. Quick recap:
+- [x] Add UE Static Mesh Creator
+- [x] Dynamic subset name like in Maya
+- [x] Collect Static Mesh Type
+- [x] Update collect output node
+- [x] Validate FBX output node
+- [x] Validate mesh is static
+- [x] Validate Unreal Static Mesh Name
+- [x] Validate Subset Name
+- [x] FBX Extractor
+- [x] FBX Loader
+- [x] Update OP Settings
+- [x] Update AYON Settings
+
+
+___
+
+
+
+
+
+Launcher tool: Refactor launcher tool (for AYON) #5612
+
+Refactored launcher tool to new tool. Separated backend and frontend logic. Refactored logic is AYON-centric and is used only in AYON mode, so it does not affect OpenPype.
+
+
+___
+
+
+
+### **🚀 Enhancements**
+
+
+
+Maya: Use custom staging dir function for Maya renders - OP-5265 #5186
+
+Check for custom staging dir when setting the renders output folder in Maya.
+
+
+___
+
+
+
+
+
+Colorspace: updating file path detection methods #5273
+
+Support for OCIO v2 file rules integrated into the available color management API
+
+
+___
+
+
+
+
+
+Chore: add default isort config #5572
+
+Add default configuration for isort tool
+
+
+___
+
+
+
+
+
+Deadline: set PATH environment in deadline jobs by GlobalJobPreLoad #5622
+
+This PR makes `GlobalJobPreLoad` set the `PATH` environment variable in Deadline jobs so that we don't have to use the full executable path for Deadline to launch the DCC app. This trick should save us from adding logic to pass the Houdini patch version and modifying the Houdini Deadline plugin. It should also work with other DCCs.
+
+
+___
+
+
+
+
+
+nuke: extract review data mov read node with expression #5635
+
+Some productions might have set default values for Read nodes; those settings no longer collide.
+
+
+___
+
+
+
+### **🐛 Bug fixes**
+
+
+
+Maya: Support new publisher for colorsets validation. #5630
+
+Fix `validate_color_sets` for the new publisher. In current `develop` the repair option does not appear due to wrong error raising.
+
+
+___
+
+
+
+
+
+Houdini: Camera Loader fix mismatch for Maya cameras #5584
+
+This PR adds
+- A workaround to match Maya render mask in Houdini
+- `SetCameraResolution` inventory action
+- set camera resolution when loading or updating camera
+
+
+___
+
+
+
+
+
+Nuke: fix set colorspace on writes #5634
+
+Colorspace is set correctly to any write node created from publisher.
+
+
+___
+
+
+
+
+
+TVPaint: Fix review family extraction #5637
+
+Extractor marks representation of review instance with review tag.
+
+
+___
+
+
+
+
+
+AYON settings: Extract OIIO transcode settings #5639
+
+Output definitions of Extract OIIO Transcode now have a name matching the OpenPype settings, and the settings are converted to a dictionary during settings conversion.
+
+
+___
+
+
+
+
+
+AYON: Fix task type short name conversion #5641
+
+Convert AYON task type short name for OpenPype correctly.
+
+
+___
+
+
+
+
+
+colorspace: missing `allowed_exts` fix #5646
+
+Colorspace module is not failing due to missing `allowed_exts` attribute.
+
+
+___
+
+
+
+
+
+Photoshop: remove trailing underscore in subset name #5647
+
+If the {layer} placeholder is at the end of the subset name template and not used (for example in `auto_image`, where separating by layer doesn't make any sense), a trailing '_' was kept. This updates the cleaning logic and extracts it into a shared place, as it might be similar in a regular `image` instance.
+
+
+___
+
+
+
+
+
+traypublisher: missing `assetEntity` in context data #5648
+
+The issue with the missing `assetEntity` key in context data is no longer a problem.
+
+
+___
+
+
+
+
+
+AYON: Workfiles tool save button works #5653
+
+Fix the Save As button in the workfiles tool. (It is a mystery why this stopped working.)
+
+
+___
+
+
+
+
+
+Max: bug fix delete items from container #5658
+
+Fix the bug shown when clicking "Delete Items from Container", selecting nothing, and pressing OK.
+
+
+___
+
+
+
+### **🔀 Refactored code**
+
+
+
+Chore: Remove unused functions from Fusion integration #5617
+
+Cleanup unused code from Fusion integration
+
+
+___
+
+
+
+### **Merged pull requests**
+
+
+
+Increase timeout for deadline test #5654
+
+Deadline picks up jobs quite slowly, so the delay is bumped up.
+
+
+___
+
+
+
+
+
+
+## [3.17.0](https://github.com/ynput/OpenPype/tree/3.17.0)
+
+
+[Full Changelog](https://github.com/ynput/OpenPype/compare/3.16.7...3.17.0)
+
+### **🚀 Enhancements**
+
+
+
+Chore: Remove schema from OpenPype root #5355
+
+Remove unused schema directory in root of repository which was moved inside openpype/pipeline/schema.
+
+
+___
+
+
+
+
+
+Igniter: Allow custom Qt scale factor rounding policy #5554
+
+Do not force `PassThrough` rounding policy if different policy is defined via env variable.
+
+
+___
+
+
+
+### **🐛 Bug fixes**
+
+
+
+Chore: Lower urllib3 to support older OpenSSL #5538
+
+Lowered `urllib3` to `1.26.16` to support older OpenSSL.
+
+
+___
+
+
+
+
+
+Chore: Do not try to add schema to zip files #5557
+
+Do not add the `schema` folder to the zip file. This fixes an issue caused by https://github.com/ynput/OpenPype/pull/5355.
+
+
+___
+
+
+
+
+
+Chore: Lower click dependency version #5629
+
+Lower click version to support older versions of python.
+
+
+___
+
+
+
+### **Merged pull requests**
+
+
+
+Bump certifi from 2023.5.7 to 2023.7.22 #5351
+
+Bumps [certifi](https://github.com/certifi/python-certifi) from 2023.5.7 to 2023.7.22.
+
+
+___
+
+
+
+
+
+
+## [3.16.7](https://github.com/ynput/OpenPype/tree/3.16.7)
+
+
+[Full Changelog](https://github.com/ynput/OpenPype/compare/3.16.6...3.16.7)
+
+### **🆕 New features**
+
+
+
+Maya: Extract active view as thumbnail when no thumbnail set #5426
+
+This sets the Maya instance's thumbnail to the current active view if no thumbnail was set yet.
+
+
+___
+
+
+
+
+
+Maya: Implement USD publish and load using native `mayaUsdPlugin` #5573
+
+Implement Creator and Loaders for extraction and loading of USD files using Maya's own `mayaUsdPlugin`. Also adds support for loading a `usd` file into an Arnold Standin (`aiStandin`) and assigning looks to it.
+
+
+___
+
+
+
+
+
+AYON: Ignore separated modules #5619
+
+Do not load already separated modules from default directory.
+
+
+___
+
+
+
+### **🚀 Enhancements**
+
+
+
+Maya: Reduce amount of code for Collect Looks #5253
+
+- Refactor `get_file_node_files` because popping from `paths` by index should have been done in reversed order anyway. It's now changed to not need popping at all.
+- Removed unused `RENDERER_NODE_TYPES` and if-branch which collected `node_attrs` list which was unused + collected members which was also done outside of the if branch and thus generated no extra data.
+- Collected all materials from look set attributes at once instead of multiple queries
+- Collected all file nodes in history from a single query instead of per type
+- Restructured assignment of `instance.data["resources"]` to be more readable
+- Cached `PXR_NODES` only once (Note: plugin load is checked on discovery of the collect look plugin) instead of querying plugin load and its nodes per file node per attribute
+- Removed some debug logs or combined some messages
+
+
+___
+
+
+
+
+
+AYON: Mark deprecated settings in Maya #5627
+
+Added deprecation info to docstrings of Maya color management settings. Resolves: https://github.com/ynput/OpenPype/issues/5556
+
+
+___
+
+
+
+
+
+Max: switching versions of maxScene maintain parentage/links with the loaders #5424
+
+When using the scene inventory to manage or update the version of loaded objects, the linked modifiers or parentage of the objects are kept. Meanwhile, loaded objects from all loaders are no longer parented to the container with OP data.
+
+
+___
+
+
+
+
+
+3ds max: small tweaks to obj extractor and model publishing flow #5605
+
+There might be a situation where the OBJ Extractor passes without failure, but no obj file is produced. This adds a simple check directly into the extractor to catch it earlier than in the integration phase. Also switched `Validate USD Plugin` to optional, because it was always run no matter whether Extract USD was enabled or not, hindering testing (and publishing).
+
+
+___
+
+
+
+
+
+TVPaint: Plugin can be reopened #5610
+
+TVPaint plugin can be reopened.
+
+
+___
+
+
+
+
+
+Maya: Remove context prompt #5632
+
+More of a plea than a PR, but could we please remove the context prompt in Maya when switching tasks?
+
+
+___
+
+
+
+
+
+General: Create a desktop icon is checked #5636
+
+In OP Installer `Create a desktop icon` is checked by default.
+___
+
+
+
+### **🐛 Bug fixes**
+
+
+
+Maya: Extract look is not AYON compatible - OP-5375 #5341
+
+The textures that would use hardlinking go through texture processors. Currently all texture processors are hardcoded to copy the texture instead of respecting the setting of forcing a copy. The texture processors were last modified 4 months ago, so effectively all clients on any pipeline updated in the last 4 months won't be utilizing hardlinking at all, since the hardcoded texture processors copy the texture no matter the OS. This opts for completely disabling the hardlinking feature while we figure out what to do about it.
+
+
+___
+
+
+
+
+
+Maya: Multiverse USD Override inherit from correct new style creator #5566
+
+Fix Creator for Multiverse USD Override by inheriting from correct new style creator class type
+
+
+___
+
+
+
+
+
+Max: Bug Fix Alembic Loaders with Ornatrix #5434
+
+Fixes the Alembic loader to support both Ornatrix Alembic and Max Alembic. Adds the Ornatrix Alembic loaders for loading Alembics with Ornatrix-related modifiers.
+
+
+___
+
+
+
+
+
+AYON: Avoid creation of duplicated links #5593
+
+Handle cases when an existing link should be recreated, and do not create the same link multiple times during a single publish.
+
+
+___
+
+
+
+
+
+Extract Review: Multilayer specification for ffmpeg #5613
+
+Extract review is specifying layer name when exr is multilayer.
+
+
+___
+
+
+
+
+
+Fusion: added support for Fusion 17 #5614
+
+Fusion 17 still uses Python 3.6, which causes issues with some of our delivered libraries. Vendorized the necessary set for Python 3.6.
+
+
+___
+
+
+
+
+
+Publisher: Fix screenshot widget #5615
+
+Use the correct super method name. EDITED: Removed the fade animation, which is not triggered in some cases; e.g. in Nuke the animation does not start. I expect that is caused by `exec_` on the dialog, which blocks event processing for the animation; even when I added the window as parent it still didn't trigger the registered callback. Modified how the "empty" space is left unfilled by using paths instead of clear mode on the painter. Added render hints to add antialiasing.
+
+
+___
+
+
+
+
+
+Photoshop: auto_images without alpha will not fail #5620
+
+ExtractReview caused an issue on `auto_image` instances without an alpha channel; this fixes it.
+
+
+___
+
+
+
+
+
+Fix - _id key used instead of id in get_last_version_by_subset_name #5626
+
+Plain 'id' is not returned because of the value in `fields`, which caused a `KeyError`; the `_id` key must be used instead.
+
+
+___
+
+
+
+
+
+Bugfix: create symlinks for ssl libs on Centos 7 #5633
+
+The Docker build was missing the `libssl.1.1.so` and `libcrypto.1.1.so` symlinks needed by the executable itself, because Python is now explicitly built with OpenSSL 1.1.1.
+
+
+___
+
+
+
+### **📃 Documentation**
+
+
+
+Documentation/local settings #5102
+
+I completed the "Working with local settings" page. I updated the screenshot, wrote an explanation for each empty category and, where available, linked the more detailed pages that already exist. I also added the "Environments" category.
+
+
+___
+
+
+
+
+
+
+## [3.16.6](https://github.com/ynput/OpenPype/tree/3.16.6)
+
+
+[Full Changelog](https://github.com/ynput/OpenPype/compare/3.16.5...3.16.6)
+
+### **🆕 New features**
+
+
+
+Workfiles tool: Refactor workfiles tool (for AYON) #5550
+
+Refactored workfiles tool to new tool. Separated backend and frontend logic. Refactored logic is AYON-centric and is used only in AYON mode, so it does not affect OpenPype.
+
+
+___
+
+
+
+
+
+AfterEffects: added validator for missing files in FootageItems #5590
+
+A published composition in AE could contain multiple FootageItems as layers. If a FootageItem contains an imported file that doesn't exist, the render triggered by the publish process will silently fail and no output is generated. This could cause a failure later in the process (in `ExtractReview`) with an unclear reason. This PR adds validation to protect from this.
+
+
+___
+
+
+
+### **🚀 Enhancements**
+
+
+
+Maya: Yeti Cache Include viewport preview settings from source #5561
+
+When publishing and loading Yeti caches, persist the display output and preview colors + settings to ensure consistency in the view.
+
+
+___
+
+
+
+
+
+Houdini: validate colorspace in review rop #5322
+
+Adds a validator that checks if the 'OCIO Colorspace' parameter on the review ROP was set to a valid value. It is a step towards managing colorspace in the review ROP; valid values are the ones in the dropdown menu. This validator also provides some helper actions. This PR is related to #4836 and #4833.
+
+
+___
+
+
+
+
+
+Colorspace: adding abstraction of publishing related functions #5497
+
+The functionality of Colorspace has been abstracted for greater usability.
+
+
+___
+
+
+
+
+
+Nuke: removing redundant workfile colorspace attributes #5580
+
+Nuke root workfile colorspace knobs have long been configured automatically via config roles, and the default values also work well. Therefore there is no need for pipeline-managed knobs.
+
+
+___
+
+
+
+
+
+Ftrack: Less verbose logs for Ftrack integration in artist facing logs #5596
+
+- Reduce artist-facing logs for component integration for Ftrack
+- Avoid "Comment is not set" log in artist facing report for Kitsu and Ftrack
+- Remove info log about `ffprobe` inspecting a file (changed to debug log)
+- Interesting to see, however, that it ffprobes the same jpeg twice - but maybe once for the thumbnail?
+
+
+___
+
+
+
+### **🐛 Bug fixes**
+
+
+
+Maya: Fix rig validators for new out_SET and controls_SET names #5595
+
+Fix usage of `out_SET` and `controls_SET` since #5310 because they can now be prefixed by the subset name.
+
+
+___
+
+
+
+
+
+TrayPublisher: set default frame values to sequential data #5530
+
+We inherit default frame handles and fps data either from the project or set them to 0. This is just for the case where a production decides not to ingest the sequential representations with asset-based metadata.
+
+
+___
+
+
+
+
+
+Publisher: Screenshot opacity value fix #5576
+
+Fix opacity value.
+
+
+___
+
+
+
+
+
+AfterEffects: fix imports of image sequences #5581
+
+#4602 broke imports of image sequences.
+
+
+___
+
+
+
+
+
+AYON: Fix representation context conversion #5591
+
+Do not fix `"folder"` key in representation context until it is needed.
+
+
+___
+
+
+
+
+
+ayon-nuke: default factory to lists #5594
+
+Default factories were missing in settings schemas for complicated objects like lists, and it was causing settings saves to fail.
+
+
+___
+
+
+
+
+
+Maya: Fix look assigner showing no asset if 'not found' representations are present #5597
+
+Fix Maya Look assigner failing to show any content if it finds an invalid container for which it can't find the asset in the current project. (This can happen when e.g. loading something from a library project.) There was logic already to avoid this, but there was a bug where it used a variable `_id` which did not exist and likely had to be `asset_id`. I've fixed that and improved the logged message a bit, e.g.:
+```
+// Warning: openpype.hosts.maya.tools.mayalookassigner.commands : Id found on 22 nodes for which no asset is found database, skipping '641d78ec85c3c5b102e836b0'
+```
+Example of a not found representation in the Loader: the issue isn't necessarily related to NOT FOUND representations, but in essence boils down to finding nodes with asset ids that do not exist in the current project, which could very well just be local meshes in your scene. **Note:** I've excluded logging the nodes themselves because that tends to be a very long list of nodes. The only downside to removing that is that it's unclear which nodes are related to that `id`. If there are any ideas on how to still provide a concise informational message about that, that'd be great so I could add it. Things I had considered:
+- Report the containers; the issue here is that it's about asset ids on nodes which don't HAVE to be in containers - it could be local geometry
+- Report the namespaces; the issue here is that it could be nodes without namespaces (plus potentially not about ALL nodes in a namespace)
+- Report the short names of the nodes; it's shorter and readable but still likely a lot of nodes.
+
+@tokejepsen @LiborBatek any other ideas?
+
+
+___
+
+
+
+
+
+Photoshop: fixed blank Flatten image #5600
+
+Flatten image is a simplified publishing approach where all visible layers are "flattened" and published together. This image could be used as a reference etc. This is implemented by an auto creator which wasn't updated after the first publish, which would result in newly created layers missing after the `auto_image` instance was created.
+
+
+___
+
+
+
+
+
+Blender: Remove Hardcoded Subset Name for Reviews #5603
+
+Fixes hardcoded subset name for Reviews in Blender.
+
+
+___
+
+
+
+
+
+TVPaint: Fix tool callbacks #5608
+
+Do not wait for callback to finish.
+
+
+___
+
+
+
+### **🔀 Refactored code**
+
+
+
+Chore: Remove unused variables and cleanup #5588
+
+Removing some unused variables. In some cases the unused variables _seemed like they should've been used - maybe?_ so please **double-check whether the code doesn't hint at an already existing bug**. Also tweaked some other small bugs in code and tweaked logging levels.
+
+
+___
+
+
+
+### **Merged pull requests**
+
+
+
+Chore: Loader log deprecation warning for 'fname' attribute #5587
+
+Since https://github.com/ynput/OpenPype/pull/4602 the `fname` attribute on the `LoaderPlugin` should've been deprecated and set for removal over time. However, no deprecation warning was logged whatsoever, and thus one usage appears to have sneaked in (fixed with this PR) and a new one tried to sneak in with a recent PR.
+
+
+___
+
+
+
+
+
+
+## [3.16.5](https://github.com/ynput/OpenPype/tree/3.16.5)
+
+
+[Full Changelog](https://github.com/ynput/OpenPype/compare/3.16.4...3.16.5)
+
+### **🆕 New features**
+
+
+
+Attribute Definitions: Multiselection enum def #5547
+
+Added `multiselection` option to `EnumDef`.
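+
+A hedged usage sketch (assumed import path; the exact signature may differ):
+
+```python
+# EnumDef lives in OpenPype's attribute definitions; `multiselection`
+# is the new option from this PR.
+from openpype.lib.attribute_definitions import EnumDef
+
+tags_def = EnumDef(
+    "tags",
+    items=["review", "ftrack", "delivery"],
+    default=["review"],  # with multiselection the value is a list
+    multiselection=True,
+    label="Tags",
+)
+```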
+
+
+___
+
+
+
+### **🚀 Enhancements**
+
+
+
+Farm: adding target collector #5494
+
+Enhancing farm publishing workflow.
+
+
+___
+
+
+
+
+
+Maya: Optimize validate plug-in path attributes #5522
+
+- Optimize query (use `cmds.ls` once)
+- Add Select Invalid action
+- Improve validation report
+- Avoid "Unknown object type" errors
+
+
+___
+
+
+
+
+
+Maya: Remove Validate Instance Attributes plug-in #5525
+
+Remove Validate Instance Attributes plug-in.
+
+
+___
+
+
+
+
+
+Enhancement: Tweak logging for artist facing reports #5537
+
+Tweak the logging of publishing for global, deadline, maya and a fusion plugin to have a cleaner artist-facing report.
+- Fix context being reported correctly from CollectContext
+- Fix ValidateMeshArnoldAttributes: fix when arnold is not loaded, fix applying settings, fix for when ai attributes do not exist
+
+
+___
+
+
+
+
+
+AYON: Update settings #5544
+
+Updated settings in AYON addons and conversion of AYON settings in OpenPype.
+
+
+___
+
+
+
+
+
+Chore: Removed Ass export script #5560
+
+Removed Arnold render script, which was obsolete and unused.
+
+
+___
+
+
+
+
+
+Nuke: Allow for knob values to be validated against multiple values. #5042
+
+Knob values can now be validated against multiple values, so you can allow write nodes to be `exr` and `png`, or `16-bit` and `32-bit`.
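+
+A minimal sketch of the idea, using a hypothetical settings shape:
+
+```python
+# Each knob maps to a list of accepted values instead of a single
+# one; a knob passes if its value matches any entry.
+allowed_values = {
+    "file_type": ["exr", "png"],
+    "datatype": ["16 bit half", "32 bit float"],
+}
+
+def knob_is_valid(knob_name, value):
+    accepted = allowed_values.get(knob_name)
+    return accepted is None or value in accepted
+
+print(knob_is_valid("file_type", "png"))   # True
+print(knob_is_valid("file_type", "jpeg"))  # False
+```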
+
+
+___
+
+
+
+
+
+Enhancement: Cosmetics for Higher version of publish already exists validation error #5190
+
+Fix double spaces in the message.
+
+
+___
+
+
+
+
+
+Nuke: publish existing frames on farm #5409
+
+This PR proposes adding a fourth option to Nuke render publishing called "Use Existing Frames - Farm". This is useful when the farm is busy or when the artist lacks enough farm licenses. Additionally, some artists prefer rendering on the farm but still want to check frames before publishing. By adding the "Use Existing Frames - Farm" option, artists have more flexibility and control over their render publishing process. This enhancement streamlines the workflow and improves efficiency for Nuke users.
+
+
+___
+
+
+
+
+
+Unreal: Create project in temp location and move to final when done #5476
+
+Create Unreal project in local temporary folder and when done, move it to final destination.
+
+
+___
+
+
+
+
+
+TrayPublisher: adding audio product type into default presets #5489
+
+Adding Audio product type into default presets so anybody can publish audio to their shots.
+
+
+___
+
+
+
+
+
+Global: avoiding cleanup of flagged representation #5502
+
+Publishing folder can be flagged as persistent at representation level.
+
+
+___
+
+
+
+
+
+General: missing tag could raise error #5511
+
+- avoiding potential situation where missing Tag key could raise error
+
+
+___
+
+
+
+
+
+Chore: Queued event system #5514
+
+Implemented an event system with more expected behavior. If an event is triggered during another event's callback, it is not processed immediately but waits until all callbacks of previous events are done. The event system also allows events not to be triggered directly once `emit_event` is called, which gives the option to process events in custom loops.
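+
+A minimal sketch of that queued behavior (not the actual OpenPype implementation):
+
+```python
+from collections import deque
+
+class QueuedEventSystem:
+    """Events emitted during a callback wait until it finishes."""
+
+    def __init__(self):
+        self._callbacks = {}
+        self._queue = deque()
+        self._processing = False
+
+    def add_callback(self, topic, callback):
+        self._callbacks.setdefault(topic, []).append(callback)
+
+    def emit_event(self, topic, data=None):
+        self._queue.append((topic, data))
+        if self._processing:
+            # Triggered from within a callback: stay in the queue, the
+            # outer loop below picks it up afterwards.
+            return
+        self._processing = True
+        try:
+            while self._queue:
+                queued_topic, queued_data = self._queue.popleft()
+                for callback in self._callbacks.get(queued_topic, []):
+                    callback(queued_data)
+        finally:
+            self._processing = False
+```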
+
+
+___
+
+
+
+
+
+Publisher: Tweak log message to provide plugin name after "Plugin" #5521
+
+Fix logged message for settings automatically applied to plugin attributes
+
+
+___
+
+
+
+
+
+Houdini: Improve VDB Selection #5523
+
+Improves VDB selection:
+- if the selection is a `SopNode`: return the selected SOP node
+- if the selection is an `ObjNode`: get the output node with the minimum 'outputidx', or the node with the display flag
+
+
+___
+
+
+
+
+
+Maya: Refactor/tweak Validate Instance In same Context plug-in #5526
+
+- Chore/Refactor: Re-use existing select invalid and repair actions
+- Enhancement: provide more elaborate PublishValidationError report
+- Bugfix: fix "optional" support by using `OptionalPyblishPluginMixin` base class.
+
+
+___
+
+
+
+
+
+Enhancement: Update houdini main menu #5527
+
+This PR adds two updates:
+- dynamic main menu
+- dynamic asset name and task
+
+
+___
+
+
+
+
+
+Houdini: Reset FPS when clicking Set Frame Range #5528
+
+_Similar to Maya,_ make `Set Frame Range` also reset FPS. Issue: https://github.com/ynput/OpenPype/issues/5516
+
+
+___
+
+
+
+
+
+Enhancement: Deadline plugins optimize, cleanup and fix optional support for validate deadline pools #5531
+
+- Fix optional support of validate deadline pools
+- Query deadline webservice only once per URL for verification, and once for available deadline pools instead of for every instance
+- Use `deadlineUrl` in `instance.data` when validating pools if it is set.
+- Code cleanup: Re-use existing `requests_get` implementation
+
+
+___
+
+
+
+
+
+Chore: PowerShell script for docker build #5535
+
+Added PowerShell script to run docker build.
+
+
+___
+
+
+
+
+
+AYON: Deadline expand userpaths in executables list #5540
+
+Expand `~` paths in the executables list.
+
+
+___
+
+
+
+
+
+Chore: Use correct git url #5542
+
+Fixed github url in README.md.
+
+
+___
+
+
+
+
+
+Chore: Create plugin does not expect system settings #5553
+
+System settings are not passed to create plugin initialization (and `apply_settings`) anymore.
+
+
+___
+
+
+
+
+
+Chore: Allow custom Qt scale factor rounding policy #5555
+
+Do not force `PassThrough` rounding policy if different policy is defined via env variable.
+
+
+___
+
+
+
+
+
+Houdini: Fix outdated containers pop-up on opening last workfile on launch #5567
+
+Fix Houdini not showing the outdated containers pop-up on scene open when launching with the last workfile argument.
+
+
+___
+
+
+
+
+
+Houdini: Improve errors e.g. raise PublishValidationError or cosmetics #5568
+
+Improve errors, e.g. raise PublishValidationError, plus cosmetics. This also fixes the Increment Current File plug-in, which was previously broken due to an invalid import.
+
+
+___
+
+
+
+
+
+Fusion: Code updates #5569
+
+Update Fusion code which contained obsolete code. Removed the `switch_ui.py` script from Fusion along with the related script in scripts.
+
+
+___
+
+
+
+### **🐛 Bug fixes**
+
+
+
+Maya: Validate Shape Zero fix repair action + provide informational artist-facing report #5524
+
+Refactor to PublishValidationError to allow the RepairAction to work + provide informational report message
+
+
+___
+
+
+
+
+
+Maya: Fix attribute definitions for `CreateYetiCache` #5574
+
+Fix attribute definitions for `CreateYetiCache`
+
+
+___
+
+
+
+
+
+Max: Optional Renderable Camera Validator for Render Instance #5286
+
+Optional validation to check that the renderable camera is set up correctly for Deadline submission. If it is not set up correctly, it won't pass validation and the user can perform repair actions.
+
+
+___
+
+
+
+
+
+Max: Adding custom modifiers back to the loaded objects #5378
+
+The custom parameters of OpenpypeData don't show in the loaded container when it is loaded through the loader.
+
+
+___
+
+
+
+
+
+Houdini: Use default_variant to Houdini Node TAB Creator #5421
+
+Use the default variant of the creator plugins on the interactive creator from the TAB node search instead of hard-coding it to `Main`.
+
+
+___
+
+
+
+
+
+Nuke: adding inherited colorspace from instance #5454
+
+Thumbnails are extracted with inherited colorspace collected from rendering write node.
+
+
+___
+
+
+
+
+
+Add kitsu credentials to deadline publish job #5455
+
+This PR hopefully fixes this issue #5440
+
+
+___
+
+
+
+
+
+AYON: Fill entities during editorial #5475
+
+Fill entities and update template data on instances during extract AYON hierarchy.
+
+
+___
+
+
+
+
+
+Ftrack: Fix version 0 when integrating to Ftrack - OP-6595 #5477
+
+Fix publishing version 0 to Ftrack.
+
+
+___
+
+
+
+
+
+OCIO: windows unc path support in Nuke and Hiero #5479
+
+Hiero and Nuke do not support Windows UNC path formatting in the OCIO environment variable.
+
+
+___
+
+
+
+
+
+Deadline: Added super call to init #5480
+
+Deadline 10.3 requires plugins inheriting from DeadlinePlugin to call the superclass `__init__` explicitly.
+
+
+___
+
+
+
+
+
+Nuke: fixing thumbnail and monitor out root attributes #5483
+
+The Nuke root colorspace settings schema for Thumbnail and Monitor Out changed gradually between versions 12, 13 and 14, and we needed to address those changes individually for each particular version.
+
+
+___
+
+
+
+
+
+Nuke: fixing missing `instance_id` error #5484
+
+Workfiles with instances created in the old publisher workflow were raising an error during the conversion method, since they were missing the `instance_id` key introduced in the new publisher workflow.
+
+
+___
+
+
+
+
+
+Nuke: existing frames validator is repairing render target #5486
+
+Nuke now correctly repairs the render target after the existing frames validator finds missing frames and the repair action is used.
+
+
+___
+
+
+
+
+
+added UE to extract burnins families #5487
+
+This PR fixes missing burnins in reviewables when rendering from UE.
+___
+
+
+
+
+
+Harmony: refresh code for current Deadline #5493
+
+- Added support in the Deadline plug-in for new versions of Harmony, in particular versions 21 and 22.
+- Remove review=False flag on render instance
+- Add farm=True flag on render instance
+- Fix is_in_tests function call in Harmony Deadline submission plugin
+- Force HarmonyOpenPype.py Deadline Python plug-in to py3
+- Fix cosmetics/hound in HarmonyOpenPype.py Deadline Python plug-in
+
+
+___
+
+
+
+
+
+Publisher: Fix multiselection value #5505
+
+Selecting multiple instances in Publisher no longer causes all instances to change all publish attributes to the same value.
+
+
+___
+
+
+
+
+
+Publisher: Avoid warnings on thumbnails if source image also has alpha channel #5510
+
+Avoids the following warning from `ExtractThumbnailFromSource`:
+```
+// pyblish.ExtractThumbnailFromSource : oiiotool WARNING: -o : Can't save 4 channels to jpeg... saving only R,G,B
+```
+
+
+
+___
+
+
+
+
+
+Update ayon-python-api #5512
+
+Update ayon python api and related callbacks.
+
+
+___
+
+
+
+
+
+Max: Fixing the bug of falling back to use workfile for Arnold or any renderers except Redshift #5520
+
+Fixes the bug of falling back to using the workfile for Arnold.
+
+
+___
+
+
+
+
+
+General: Fix Validate Publish Dir Validator #5534
+
+A nonsensical "family" key was used instead of the real value ('render' etc.), which would result in wrong translation of intermediate family names. Updated the docstring.
+
+
+___
+
+
+
+
+
+have the addons loading respect a custom AYON_ADDONS_DIR #5539
+
+When using a custom AYON_ADDONS_DIR environment variable, the launcher uses it correctly and downloads and extracts addons there; however, when running, AYON does not respect this environment variable.
+
+
+___
+
+
+
+
+
+Deadline: files on representation cannot be single item list #5545
+
+Further logic expects that single-item files will only be a string, not a list (e.g. repre["files"] = "abc.exr", not repre["files"] = ["abc.exr"]). This would cause an issue in ExtractReview later. It could happen if Deadline rendered a single frame file with a different frame value.
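+
+A sketch of the expected normalization (hypothetical placement, wherever the representation is finalized):
+
+```python
+# Single-item file sequences collapse to a plain string so later
+# plugins such as ExtractReview receive the expected type.
+files = ["abc.exr"]
+if isinstance(files, (list, tuple)) and len(files) == 1:
+    files = files[0]
+
+repre = {"name": "exr", "files": files}
+print(repre["files"])  # "abc.exr", not ["abc.exr"]
+```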
+
+
+___
+
+
+
+
+
+Webpublisher: better encode list values for click #5546
+
+Targets could be a list; the original implementation pushed it as separate items, but it must be added as `--targets webpublish --targets filepublish`. `webpublish_routes` handles triggering from the UI; changes in `publish_functions` handle triggering from cmd (for tests, API access).
+
+
+___
+
+
+
+
+
+Houdini: Introduce imprint function for correct version in hda loader #5548
+
+Resolve #5478
+
+
+___
+
+
+
+
+
+AYON: Fill entities during editorial (2) #5549
+
+Fix changes made in https://github.com/ynput/OpenPype/pull/5475.
+
+
+___
+
+
+
+
+
+Max: OP Data updates in Loaders #5563
+
+Fix the bug of the loaders not being able to load the objects when iterating keys and values of the dict. Max prefers a plain list over a list in a dict.
+
+
+___
+
+
+
+
+
+Create Plugins: Better check of overriden '__init__' method #5571
+
+Create plugins no longer log warning messages about each create plugin, which were caused by a wrong `__init__` method check.
+
+
+___
+
+
+
+### **Merged pull requests**
+
+
+
+Tests: fix unit tests #5533
+
+Fixed failing tests. Updated Unreal's validator to match the removed general one, which had a couple of issues fixed.
+
+
+___
+
+
+
+
+
+
+## [3.16.4](https://github.com/ynput/OpenPype/tree/3.16.4)
+
+
+[Full Changelog](https://github.com/ynput/OpenPype/compare/3.16.3...3.16.4)
+
+### **🆕 New features**
+
+
+
+Feature: Download last published workfile specify version #4998
+
+Setting the `workfile_version` key in the hook's `self.launch_context.data` allows you to specify the workfile version you want the sync service to download if none is matched locally. This is helpful if the last version hasn't been correctly published/synchronized and you want to recover the previous one (or whichever you'd like). The version can be set in two ways (a hedged hook sketch follows the list):
+- OP's absolute version, matching the `version` index in DB.
+- Relative version in reverse order from the last one: `-2`, `-3`... I don't know where I should write documentation about that.
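+
+A hedged sketch of such a hook (the class name is hypothetical; the base class and attribute follow the description above, not a verified API):
+
+```python
+from openpype.lib.applications import PreLaunchHook
+
+class ForceWorkfileVersion(PreLaunchHook):
+    """Ask the sync service to download a specific workfile version."""
+
+    def execute(self):
+        # Absolute version, matching the `version` index in the DB:
+        self.launch_context.data["workfile_version"] = 3
+        # Or relative, counted back from the last one:
+        # self.launch_context.data["workfile_version"] = -2
+```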
+
+
+___
+
+
+
+### **🚀 Enhancements**
+
+
+
+Maya: allow not creation of group for Import loaders #5427
+
+This PR enhances the previous one. All ReferenceLoaders can now avoid wrapping imported products into an explicit group, and `Import` loaders have the same options. The control for this is separate in Settings; e.g. Reference might wrap loaded items in a group while `Import` might not.
+
+
+___
+
+
+
+
+
+3dsMax: Settings for Ayon #5388
+
+Max Addon Setting for Ayon
+
+
+___
+
+
+
+
+
+General: Navigation to Folder from Launcher #5404
+
+Adds an action in launcher to open the directory of the asset.
+
+
+___
+
+
+
+
+
+Chore: Default variant in create plugin #5429
+
+The `default_variant` attribute on create plugins always returns a string, and if the default variant is not filled, other ways to get one are implemented.
+
+
+___
+
+
+
+
+
+Publisher: Thumbnail widget enhancements #5439
+
+The thumbnail widget in Publisher has 3 new options to choose from: Paste (from clipboard), Take screenshot and Browse. The Clear button and the new options are not visible by default; the user must expand the options button to show them.
+
+
+___
+
+
+
+
+
+AYON: Update ayon api to '0.3.5' #5460
+
+Updated ayon-python-api to 0.3.5.
+
+
+___
+
+
+
+### **🐛 Bug fixes**
+
+
+
+AYON: Apply unknown ayon settings first #5435
+
+Settings of custom addons are available in converted settings.
+
+
+___
+
+
+
+
+
+Maya: Fix wrong subset name of render family in deadline #5442
+
+The new publisher creates different subset names than before, which resulted in duplication of the `render` string in the final subset name of the `render` family published on Deadline. This PR solves that; it also fixes issues with legacy instances from the old publisher, matching the subset name as it was before. It solves the same issue in the Max implementation.
+
+
+___
+
+
+
+
+
+Maya: Fix setting of version to workfile instance #5452
+
+If there are multiple instances of renderlayer published, previous logic resulted in unpredictable rewrite of instance family to 'workfile' if `Sync render version with workfile` was on.
+
+
+___
+
+
+
+
+
+Maya: Context plugin shouldn't be tied to family #5464
+
+The `Maya Current File` collector was tied to `workfile` unnecessarily. It should run even if a `workfile` instance is not being published.
+
+
+___
+
+
+
+
+
+Unreal: Fix loading hero version for static and skeletal meshes #5393
+
+Fixed a problem with loading hero versions for static and skeletal meshes.
+
+
+___
+
+
+
+
+
+TVPaint: Fix 'repeat' behavior #5412
+
+Calculation of frames for repeat behavior is working correctly.
+
+
+___
+
+
+
+
+
+AYON: Thumbnails cache and api prep #5437
+
+Moved thumbnails cache from ayon python api to OpenPype and prepare AYON thumbnail resolver for new api functions. Current implementation should work with old and new ayon-python-api.
+
+
+___
+
+
+
+
+
+Nuke: Name of the Read Node should be updated correctly when switching versions or assets. #5444
+
+Fixes the Read node's name not being updated correctly when setting the version or switching assets.
+
+
+___
+
+
+
+
+
+Farm publishing: asymmetric handles fixed #5446
+
+Handles are now set correctly on a farm-published product version if asymmetric handles were set on shot attributes.
+
+
+___
+
+
+
+
+
+Scene Inventory: Provider icons fix #5450
+
+Fix how provider icons are accessed in scene inventory.
+
+
+___
+
+
+
+
+
+Fix typo on Deadline OP plugin name #5453
+
+Surprised that no one has hit this bug yet... but it seems like there was a typo on the name of the OP Deadline plugin when submitting jobs to it.
+
+
+___
+
+
+
+
+
+AYON: Fix version attributes update #5472
+
+Fixed updates of attribs in AYON mode.
+
+
+___
+
+
+
+### **Merged pull requests**
+
+
+
+Added missing defaults for import_loader #5447
+
+
+___
+
+
+
+
+
+Bug: Local settings don't open on 3.14.7 #5220
+
+Issue reported by @FadyFS: settings from version 3.14.7 don't open when running OpenPype 3.15.11-nightly.3 (Linux / CentOS). Happened only once in a particular configuration.
+
+Steps to reproduce:
+
+1. Run OpenPype on the 3.15.11-nightly.3 version
+2. Open settings in the 3.14.7 version
+
+___
+
+
+
+
+
+Tests: Add automated targets for tests #5443
+
+Without it, plugins with 'automated' targets won't be triggered (e.g. `CloseAE` etc.).
+
+
+___
+
+
+
+
+
+
+## [3.16.3](https://github.com/ynput/OpenPype/tree/3.16.3)
+
+
+[Full Changelog](https://github.com/ynput/OpenPype/compare/3.16.2...3.16.3)
+
+### **🆕 New features**
+
+
+
+AYON: 3rd party addon usage #5300
+
+Prepare OpenPype code to be able to use the `ayon-third-party` addon, which supplies ffmpeg and OpenImageIO executables. Because both can define custom arguments (more than one), new functions were needed. The new functions are `get_ffmpeg_tool_args` and `get_oiio_tool_args`. They work similarly to the previous ones, but instead of a string they return a list of strings. All places using the previous functions `get_ffmpeg_tool_path` and `get_oiio_tool_path` now use the new ones. They should be backwards compatible, even with the addon, if it returns a single argument.
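+
+A sketch of the migration, assuming the new functions are exposed from `openpype.lib` like their predecessors:
+
+```python
+import subprocess
+
+from openpype.lib import get_ffmpeg_tool_args
+
+# Previously: cmd = [get_ffmpeg_tool_path("ffmpeg"), ...]
+# The tool may now resolve to more than one argument:
+cmd = get_ffmpeg_tool_args("ffmpeg")  # list of strings, not one path
+cmd += ["-i", "input.mov", "output.mp4"]
+subprocess.run(cmd, check=True)
+```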
+
+
+___
+
+
+
+
+
+AYON: Addon settings in OpenPype #5347
+
+Moved settings addons to OpenPype server addon. Modified create package to create zip files for server for each settings addon and for openpype addon.
+
+
+___
+
+
+
+
+
+AYON: Add folder to template data #5417
+
+Added `folder` to template data, so `{folder[name]}` can be used in templates.
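+
+A plain-Python illustration of the new key:
+
+```python
+# `{folder[name]}` resolves through standard str.format on the
+# template data dictionary.
+template_data = {"folder": {"name": "sh010"}}
+print("{folder[name]}_v001".format(**template_data))  # -> sh010_v001
+```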
+
+
+___
+
+
+
+
+
+Option to start versioning from 0 #5262
+
+This PR adds a settings option to start all versioning from 0. This PR replaces #4455.
+
+
+___
+
+
+
+
+
+Ayon: deadline implementation #5321
+
+Quick implementation of Deadline in AYON. A new AYON plugin was added to the Deadline repository.
+
+
+___
+
+
+
+
+
+AYON: Remove AYON launch logic from OpenPype #5348
+
+Removed AYON launch logic from OpenPype. The logic is outdated at this moment and is replaced by `ayon-launcher`.
+
+
+___
+
+
+
+### **🚀 Enhancements**
+
+
+
+Bug: Error on multiple instance rig with maya #5310
+
+Changed the endswith method to startswith, because the sets are automatically named out_SET, out_SET1, out_SET2, ...
+
+
+___
+
+
+
+
+
+Applications: Use prelaunch hooks to extract environments #5387
+
+Environment variable preparation is based on prelaunch hooks. This should allow passing OCIO environment variables to farm jobs.
+
+
+___
+
+
+
+
+
+Applications: Launch hooks cleanup #5395
+
+Use `set` instead of `list` for filtering attributes in launch hooks. Celaction hooks dir does not contain `__init__.py`. Celaction prelaunch hook is reusing `CELACTION_ROOT_DIR`. Launch hooks are using full import from `openpype.lib.applications`.
+
+
+___
+
+
+
+
+
+Applications: Environment variables order #5245
+
+Changed the order in which environment variables are set. Context environment variables are set first, then project environment overrides. Also, asset and task environment variables are optional.
+
+
+___
+
+
+
+
+
+Autosave preferences can be read after Nuke opens the script #5295
+
+It looks like the script needs to be opened in Nuke to correctly load the autosave preferences. This PR reads the Nuke script in context and offers overwriting the current script with the autosaved one if an autosave exists.
+
+
+___
+
+
+
+
+
+Resolve: Update with compatible resolve version and latest docs #5317
+
+Adds missing information about the compatible Resolve version and latest docs to https://github.com/ynput/OpenPype/tree/develop/openpype/hosts/resolve
+
+
+___
+
+
+
+
+
+Chore: Remove deprecated functions #5323
+
+Removed functions/classes that are deprecated and marked to be removed.
+
+
+___
+
+
+
+
+
+Nuke Render and Prerender nodes Process Order - OP-3555 #5332
+
+This PR exposes control over the order in which instances are processed, by sorting the created instances. The sorting happens on `render_order` and subset name. If the `render_order` knob is found on the instance, we sort by that first before sorting by subset name. Instances with `render_order` are processed before nodes without it. This could be extended in the future by querying other knobs, but I don't know of a use case for this. Hardcoded the creator `order` attribute of the `prerender` class to be before `render`. This could be exposed to the user/studio, but I don't know of a use case for this either.
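+
+A minimal sketch of the described ordering, using hypothetical plain-dict instances:
+
+```python
+# Instances with a render_order knob come first, sorted by that
+# value; the rest follow, sorted by subset name.
+def sort_key(instance):
+    render_order = instance.get("render_order")
+    if render_order is not None:
+        return (0, int(render_order), instance["subset"])
+    return (1, 0, instance["subset"])
+
+instances = [
+    {"subset": "renderB"},
+    {"subset": "renderA", "render_order": "2"},
+    {"subset": "prerenderMain", "render_order": "1"},
+]
+print([i["subset"] for i in sorted(instances, key=sort_key)])
+# -> ['prerenderMain', 'renderA', 'renderB']
+```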
+
+
+___
+
+
+
+
+
+Unreal: Python Environment Improvements #5344
+
+Automatically set `UE_PYTHONPATH` as `PYTHONPATH` when launching Unreal.
+
+
+___
+
+
+
+
+
+Unreal: Custom location for Unreal Ayon Plugin #5346
+
+Added a new environment variable `AYON_BUILT_UNREAL_PLUGIN` to set an already existing and built Ayon Plugin for Unreal.
+
+
+___
+
+
+
+
+
+Unreal: Better handling of Exceptions in UE Worker threads #5349
+
+Implemented a new `UEWorker` base class to handle exception during the execution of UE Workers.
+
+
+___
+
+
+
+
+
+Houdini: Add farm toggle on creation menu #5350
+
+Deadline farm publishing and rendering for Houdini was made possible with PR #4825. Farm publishing is enabled by default on some ROP nodes, which may surprise new users (like me). I think adding a toggle (on by default) in the creation UI is better, so that users will be aware that there's a farm option for this publish instance. ROPs modified:
+- [x] Mantra ROP
+- [x] Karma ROP
+- [x] Arnold ROP
+- [x] Redshift ROP
+- [x] Vray ROP
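+
+A minimal sketch of such a toggle using OpenPype's attribute definitions (the exact creator wiring is assumed):
+```python
+from openpype.lib import BoolDef
+
+def get_instance_attr_defs(self):
+    # shown in the creation UI; on by default to keep current behavior
+    return [
+        BoolDef("farm", label="Submitting to Farm", default=True),
+    ]
+```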
+
+
+___
+
+
+
+
+
+Ftrack: Sync to avalon settings #5353
+
+Added roles settings for sync to avalon action.
+
+
+___
+
+
+
+
+
+Chore: Schemas inside OpenPype #5354
+
+Moved/copied schemas from the repository root into `openpype/pipeline`.
+
+
+___
+
+
+
+
+
+AYON: Addons creation enhancements #5356
+
+Enhanced AYON addons creation. Fixed an issue with the `Pattern` typehint. Zip filenames contain the version. The OpenPype package skips modules that are already separated in AYON. Updated settings of addons.
+
+
+___
+
+
+
+
+
+AYON: Update staging icons #5372
+
+Updated staging icons for staging mode.
+
+
+___
+
+
+
+
+
+Enhancement: Houdini Update pointcache labels #5373
+
+It's logical to find pointcache types listed one after another, but they were named differently, so this PR updates their labels.
+
+
+___
+
+
+
+
+
+nuke: split write node product instance features #5389
+
+Improving Write node product instances by allowing precise activation of specific features.
+
+
+___
+
+
+
+
+
+Max: Use the empty modifiers in container to store AYON Parameter #5396
+
+Instead of adding the AYON/OP parameter along with other attributes inside the container, empty modifiers are created to store AYON/OP custom attributes.
+
+
+___
+
+
+
+
+
+AfterEffects: Removed unused imports #5397
+
+Removed unused import from extract local render plugin file.
+
+
+___
+
+
+
+
+
+Nuke: adding BBox knob type to settings #5405
+
+Nuke knob types in settings now have a new `Box` type for reposition nodes like Crop or Reformat.
+
+
+___
+
+
+
+
+
+SyncServer: Existence of module is optional #5413
+
+The existence of the SyncServer module is optional and not required. Added the `sync_server` module back to the ignored modules when the openpype addon is created for AYON. The `syncserver` command is marked as deprecated and redirected to the sync server CLI.
+
+
+___
+
+
+
+
+
+Webpublisher: Self contain test publish logic #5414
+
+Moved test logic of publishing to webpublisher. Simplified `remote_publish` to remove webpublisher specific logic.
+
+
+___
+
+
+
+
+
+Webpublisher: Cleanup targets #5418
+
+Removed the `remote` target from webpublisher and replaced it with two targets: `webpublisher` and `automated` (see the sketch below).
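+
+A minimal sketch of how a publish plugin opts into the new targets (the plugin body is illustrative):
+```python
+import pyblish.api
+
+
+class CollectWebpublisherData(pyblish.api.ContextPlugin):
+    label = "Collect Webpublisher Data"
+    order = pyblish.api.CollectorOrder
+    targets = ["webpublisher", "automated"]  # previously ["remote"]
+
+    def process(self, context):
+        pass  # collection logic goes here
+```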
+
+
+___
+
+
+
+
+
+nuke: update server addon settings with box #5419
+
+Updating Nuke AYON server settings with the `Box` option in knob types.
+
+
+___
+
+
+
+### **🐛 Bug fixes**
+
+
+
+Maya: fix validate frame range on review attached to other instances #5296
+
+Fixes a situation where the frame range validator can't be turned off on models if they are attached to a reviewable camera in Maya.
+
+
+___
+
+
+
+
+
+Maya: Apply project settings to creators #5303
+
+Project settings were not applied to the creators.
+
+
+___
+
+
+
+
+
+Maya: Validate Model Content #5336
+
+`assemblies` in `cmds.ls` does not seem to work:
+```python
+from maya import cmds
+
+content_instance = ['|group2|pSphere1_GEO', '|group2|pSphere1_GEO|pSphere1_GEOShape', '|group1|pSphere1_GEO', '|group1|pSphere1_GEO|pSphere1_GEOShape']
+assemblies = cmds.ls(content_instance, assemblies=True, long=True)
+print(assemblies)
+```
+
+Fixing it with string splitting instead.
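+
+A minimal sketch of the string-splitting approach (list contents illustrative):
+```python
+content_instance = [
+    "|group2|pSphere1_GEO",
+    "|group1|pSphere1_GEO|pSphere1_GEOShape",
+]
+# the top-level assembly is the first component of each full DAG path
+assemblies = {"|" + path.split("|", 2)[1] for path in content_instance}
+print(assemblies)  # {'|group2', '|group1'} (order may vary)
+```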
+
+
+___
+
+
+
+
+
+Bugfix: Maya update defaults variable #5368
+
+Something was forgotten while moving from `LegacyCreator` to `NewCreator`: `LegacyCreator` used `defaults` to list suggested subset names, which was renamed to `default_variants` in `NewCreator`, so setting `defaults` to any value has no effect. This update affects:
+- [x] Model
+- [x] Set Dress
+
+
+___
+
+
+
+
+
+Chore: Python 2 support fix #5375
+
+Fix Python 2 support by adding `click` to the Python 2 dependencies and removing an f-string from Maya.
+
+
+___
+
+
+
+
+
+Maya: do not create top level group on reference #5402
+
+This PR allows not wrapping loaded referenced assets in a top-level group, either explicitly per artist or by configuration in Settings. Artists can control group creation in the ReferenceLoader options. No group creation by default can be set by emptying `Group Name` in `project_settings/maya/load/reference_loader`.
+
+
+___
+
+
+
+
+
+Settings: Houdini & Maya create plugin settings #5436
+
+Fixes related to Maya and Houdini settings. Renamed `defaults` to `default_variants` in plugin settings to match the attribute name on the create plugin in both OpenPype and AYON settings. Fixed Houdini AYON settings, which were missing settings for default variants, and fixed Maya AYON settings, where the default factory had a wrong assignment.
+
+
+___
+
+
+
+
+
+Maya: Hide CreateAnimation #5297
+
+When converting the `animation` family or loading a `rig` family, the `animation` creator needs to be included but hidden in the creator context.
+
+
+___
+
+
+
+
+
+Nuke Anamorphic slate - Read pixel aspect from input #5304
+
+When the asset pixel aspect differs from the rendered pixel aspect, the Nuke slate pixel aspect is no longer taken from the asset but is read via ffprobe.
+
+
+___
+
+
+
+
+
+Nuke - Allow ExtractReviewDataMov with no timecode knob #5305
+
+ExtractReviewDataMov allows specifying the file type. Writing an extension other than mov used to fail in generate_mov, which assumed the `mov64_write_timecode` knob exists.
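+
+A minimal sketch of the guard using the Nuke API (node name illustrative):
+```python
+import nuke
+
+write_node = nuke.toNode("Write1")  # illustrative
+# only touch the timecode knob for containers that actually have it
+if "mov64_write_timecode" in write_node.knobs():
+    write_node["mov64_write_timecode"].setValue(1)
+```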
+
+
+___
+
+
+
+
+
+Nuke: removing settings schema with defaults for OpenPype #5306
+
+continuation of https://github.com/ynput/OpenPype/pull/5275
+
+
+___
+
+
+
+
+
+Bugfix: Dependency without 'inputLinks' not downloaded #5337
+
+Remove condition that avoids downloading dependency without `inputLinks`.
+
+
+___
+
+
+
+
+
+Bugfix: Houdini Creator use selection even if it was toggled off #5359
+
+When creating many product types (families) one after another without manually refreshing the creator window, toggling `Use selection` once made all later product types use the selection even when it was toggled off.
+
+Before (the selection keeps being used even when toggled off, unless the window is refreshed manually): https://github.com/ynput/OpenPype/assets/20871534/8b890122-5b53-4c6b-897d-6a2f3aa3388a
+
+After (it works as expected): https://github.com/ynput/OpenPype/assets/20871534/6b1db990-de1b-428e-8828-04ab59a44e28
+
+
+___
+
+
+
+
+
+Houdini: Correct camera selection for karma renderer when using selected node #5360
+
+When the user creates the Karma ROP with a camera selected via Use selection, it gives the error message "no render camera found in selection". This PR fixes creating the Karma ROP when using a selected camera node in Houdini.
+
+
+___
+
+
+
+
+
+AYON: Environment variables and functions #5361
+
+Prepared code for ayon-launcher compatibility. Fixed ayon-launcher subprocess calls, added more checks for `AYON_SERVER_ENABLED`, used ayon-launcher-suitable environment variables in AYON mode and changed outputs of some functions. Replaced usages of the `OPENPYPE_REPOS_ROOT` environment variable with the `PACKAGE_DIR` variable, so correct paths are used.
+
+
+___
+
+
+
+
+
+Nuke: farm rendering of prerender ignore roots in nuke #5366
+
+The `prerender` family was using the wrong subset (the same as `render`), when it should be different.
+
+
+___
+
+
+
+
+
+Bugfix: Houdini update defaults variable #5367
+
+Something was forgotten while moving from `LegacyCreator` to `NewCreator`: `LegacyCreator` used `defaults` to list suggested subset names, which was renamed to `default_variants` in `NewCreator`, so setting `defaults` to any value has no effect. This update affects:
+- [x] Arnold ASS
+- [x] Arnold ROP
+- [x] Karma ROP
+- [x] Mantra ROP
+- [x] Redshift ROP
+- [x] VRay ROP
+
+
+___
+
+
+
+
+
+Publisher: Fix create/publish animation #5369
+
+Use geometry movement instead of changing min/max width.
+
+
+___
+
+
+
+
+
+Unreal: Move unreal splash screen to unreal #5370
+
+Moved splash screen code to unreal integration and removed import from Igniter.
+
+
+___
+
+
+
+
+
+Nuke: returned not cleaning of renders folder on the farm #5374
+
+A previous PR enabled explicit cleanup of the `renders` folder after farm publishing. This does not match customers' workflows: customers want access to the files in the `renders` folder, e.g. to redo some frames of long frame sequences. This PR extends the logic so that rendered files are marked for deletion only if the instance does not have `stagingDir_persistent`. For backwards compatibility, all Nuke instances have `stagingDir_persistent` set to True, i.e. the `renders` folder won't be cleaned after a farm publish.
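+
+A minimal sketch of the persistence check (data and context keys follow the description above; the helper wiring is assumed):
+```python
+def mark_for_cleanup(context, instance, staged_files):
+    """Mark rendered files for deletion unless the staging dir is persistent."""
+    if instance.data.get("stagingDir_persistent", False):
+        return  # keep the renders folder, e.g. for redoing frames
+    context.data.setdefault("cleanupFullPaths", []).extend(staged_files)
+```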
+
+
+___
+
+
+
+
+
+Nuke: loading sequences is working #5376
+
+Loading image sequences was broken after the latest release, version 3.16. It is now functioning as expected.
+
+
+___
+
+
+
+
+
+AYON: Fix settings conversion for ayon addons #5377
+
+AYON addon settings are available in system settings and do not have the same values available under the `"modules"` subkey.
+
+
+___
+
+
+
+
+
+Nuke: OCIO env var workflow #5379
+
+The OCIO environment variable needs to be consistently handled across all platforms. Nuke resolves the custom OCIO config path differently depending on the platform, so we included the ocio config path in the workfile with a partial replacement using an environment variable. Additionally, for Windows sessions, we replaced backward slashes with a TCL expression.
+
+
+___
+
+
+
+
+
+Unreal: Fix Unreal build script #5381
+
+Define 'AYON_UNREAL_ROOT' environment variable in unreal addon.
+
+
+___
+
+
+
+
+
+3dsMax: Use relative path to MAX_HOST_DIR #5382
+
+Use `MAX_HOST_DIR` to calculate the startup script path instead of using a path relative to the `OPENPYPE_ROOT` environment variable.
+
+
+___
+
+
+
+
+
+Bugfix: Houdini abc validator error message #5386
+
+When the ABC path validator fails, it prints node objects, not node paths or names. This bug happened because the `get_invalid` method was updated to return nodes instead of node paths.
+
+
+___
+
+
+
+
+
+Nuke: node name influence product (subset) name #5392
+
+Nuke now allows users to duplicate publishing instances, making the workflow easier. By duplicating a node and changing its name, users can set the product (subset) name in the publishing context. Users can also change the variant name in Publisher, which automatically renames the associated instance node.
+
+
+___
+
+
+
+
+
+Houdini: delete redundant bgeo sop validator #5394
+
+The `Validate BGEO SOP Path` validator is redundant: it catches two cases that are already implemented in "Validate Output Node", which works with `bgeo` as well as `abc` because `"pointcache"` is listed in its families.
+
+
+___
+
+
+
+
+
+Nuke: workfile is not reopening after change of context #5399
+
+Nuke no longer reopens the latest workfile when the context is changed to a different task using the Workfile tool. The issue also affected the Script Clean (from Nuke File menu) and Close feature, but it has now been fixed.
+
+
+___
+
+
+
+
+
+Bugfix: houdini hard coded project settings #5400
+
+This PR solves the issue with hard-coded project settings in Houdini.
+
+
+___
+
+
+
+
+
+AYON: 3dsMax settings #5401
+
+Keep `adsk_3dsmax` group in applications settings.
+
+
+___
+
+
+
+
+
+Bugfix: update defaults to default_variants in maya and houdini OP DCC settings #5407
+
+When moving to the new creator in Maya and Houdini, updating the settings was missed.
+
+
+___
+
+
+
+
+
+Applications: Attributes creation #5408
+
+The Applications addon no longer causes an infinite server restart loop.
+
+
+___
+
+
+
+
+
+Max: fix the bug of handling Object deletion in OP Parameter #5410
+
+If an object is added to the OP parameter and the user deletes it in the scene thereafter, the container with OP attributes errors out. This PR resolves that bug. It also fixes the bug of not adding the attribute to the OP parameter correctly when the user enables "use selections" to link the object into the OP parameter.
+
+
+___
+
+
+
+
+
+Colorspace: including environments from launcher process #5411
+
+Fixed a bug where the OCIO config template was not properly formatting environment variables from System Settings `general/environment`.
+
+
+___
+
+
+
+
+
+Nuke: workfile template fixes #5428
+
+A bunch of small bugs needed to be fixed.
+
+
+___
+
+
+
+
+
+Houdini, Max: Fix missed function interface change #5430
+
+This PR https://github.com/ynput/OpenPype/pull/5321/files from @kalisp missed updating the `add_render_job_env_var` in Houdini and Max as they are passing an extra arg:
+```
+TypeError: add_render_job_env_var() takes 1 positional argument but 2 were given
+```
+
+
+___
+
+
+
+
+
+Scene Inventory: Fix issue with 'sync_server' #5431
+
+Fix access to the `sync_server` attribute in the scene inventory.
+
+
+___
+
+
+
+
+
+Unpack project: Fix import issue #5433
+
+Added `load_json_file`, `replace_project_documents` and `store_project_documents` to mongo init.
+
+
+___
+
+
+
+
+
+Chore: Versions post fixes #5441
+
+Fixed issues caused by a previous change: filled the right version value into anatomy data.
+
+
+___
+
+
+
+### **📃 Testing**
+
+
+
+Tests: Copy file_handler as it will be removed by purging ayon code #5357
+
+Ayon code will get purged in the future from this repo/addon, therefore all `ayon_common` will be gone. `file_handler` gets internalized to tests as it is not used anywhere else.
+
+
+___
+
+
+
+
+
+
+## [3.16.2](https://github.com/ynput/OpenPype/tree/3.16.2)
+
+
+[Full Changelog](https://github.com/ynput/OpenPype/compare/3.16.1...3.16.2)
+
+### **🆕 New features**
+
+
+
+Fusion - Set selected tool to active #5327
+
+When you run the action to select a node, this PR makes the node-flow show the selected node, and you'll see the node's controls in the inspector.
+
+
+___
+
+
+
+### **🚀 Enhancements**
+
+
+
+Maya: All base create plugins #5326
+
+Prepared base classes for each creator type in Maya. Extended `MayaCreatorBase` to have default implementations of common instance logic, which is used in each type of plugin.
+
+
+___
+
+
+
+
+
+Windows: Support long paths on zip updates. #5265
+
+Support long paths for version extraction on Windows. The use case is having long paths in, for example, an addon. You can install to the C drive, but because the zip files are extracted in the local user's folder, additional subdirectories are added to the paths, quickly making them too long for Windows to handle the zip updates.
+
+
+___
+
+
+
+
+
+Blender: Added setting to set resolution and start/end frames at startup #5338
+
+This PR adds `set_resolution_startup` and `set_frames_startup` settings. They automatically set, respectively, the resolution and the start/end frames and FPS in Blender when opening a file or creating a new one.
+
+
+___
+
+
+
+
+
+Blender: Support for ExtractBurnin #5339
+
+This PR adds support for ExtractBurnin for Blender, when publishing a Review.
+
+
+___
+
+
+
+
+
+Blender: Extract Camera as Alembic #5343
+
+Added support to extract Alembic Cameras in Blender.
+
+
+___
+
+
+
+### **🐛 Bug fixes**
+
+
+
+Maya: Validate Instance In Context #5335
+
+Added the missing new publisher error so the repair action shows up.
+
+
+___
+
+
+
+
+
+Settings: Fix default settings #5311
+
+Fixed default settings for Shotgrid. Renamed `FarmRootEnumEntity` to `DynamicEnumEntity` and removed a doubled ABC metaclass definition (all settings entities have an abstract metaclass).
+
+
+___
+
+
+
+
+
+Deadline: missing context argument #5312
+
+Updated function arguments
+
+
+___
+
+
+
+
+
+Qt UI: Multiselection combobox PySide6 compatibility #5314
+
+- The check states are replaced with the values for PySide6
+- `QtCore.Qt.ItemIsUserTristate` is used instead of `QtCore.Qt.ItemIsTristate` to avoid crashes on PySide6
+
+
+___
+
+
+
+
+
+Docker: handle openssl 1.1.1 for centos 7 docker build #5319
+
+The move to Python 3.9 added the need to use OpenSSL 1.1.x, but it is not available by default on the CentOS 7 image. This fixes it.
+
+
+___
+
+
+
+
+
+houdini: fix typo in redshift proxy #5320
+
+There's a typo in `create_redshift_proxy.py` (an extra backtick) in the filename; this PR suggests a fix.
+
+
+___
+
+
+
+
+
+Houdini: fix wrong creator identifier in pointCache workflow #5324
+
+Fixing a bug in publishing Alembics, where an invalid creator identifier caused a missing family association.
+
+
+___
+
+
+
+
+
+Fix colorspace compatibility check #5334
+
+For some reason a user may have `PyOpenColorIO` installed on their machine (in this case it came with RenderMan). That can trick the compatibility check, as `import PyOpenColorIO` won't raise an error even though the module may be an old version. Before: the compatibility check passed and the wrapper was used directly. After the fix: the wrapper is used via a subprocess instead.
+
+
+___
+
+
+
+### **Merged pull requests**
+
+
+
+Remove forgotten dev logging #5315
+
+
+___
+
+
+
+
+
+
+## [3.16.1](https://github.com/ynput/OpenPype/tree/3.16.1)
+
+
+[Full Changelog](https://github.com/ynput/OpenPype/compare/3.16.0...3.16.1)
+
+### **🆕 New features**
+
+
+
+Royal Render: Maya and Nuke support #5191
+
+Basic working implementation of Royal Render support in Maya. It expects the new publisher to be implemented in Maya.
+
+
+___
+
+
+
+
+
+Blender: Blend File Family #4321
+
+Implementation of the Blend File family, analogous to the Maya Scene one.
+
+
+___
+
+
+
+
+
+Houdini: simple bgeo publishing #4588
+
+Support for simple publishing of bgeo files.
+
+ This adds basic support for bgeo publishing in Houdini. It allows publishing bgeo in all supported formats (selectable in the creator options). If the selected node has an `output` node at the SOP level, it is automatically used as the path in the file node.
+
+
+___
+
+
+
+### **🚀 Enhancements**
+
+
+
+General: delivery action add renamed frame number in Loader #5024
+
+Frame offset options for delivery in the OpenPype Loader.
+
+
+___
+
+
+
+
+
+Enhancement/houdini add path action for abc validator #5237
+
+Add a default path attribute action. It's a helper action more than a repair action, used to add a default single value.
+
+
+___
+
+
+
+
+
+Nuke: auto apply all settings after template build #5277
+
+Adding an automatic run of Apply All Settings after the template builder finishes its process. This applies the frame range, image size and colorspace found in the context of a task shot.
+
+
+___
+
+
+
+
+
+Harmony:Removed loader settings for Harmony #5289
+
+It shouldn't be configurable; it is internal logic. Adding an additional extension wouldn't magically make it work.
+
+
+___
+
+
+
+### **🐛 Bug fixes**
+
+
+
+AYON: Make appdirs case sensitive #5298
+
+Appdirs for AYON are case sensitive on Linux and macOS, so we needed to change them to match the ayon launcher. Changed 'ayon' to 'AYON' and 'ynput' to 'Ynput'.
+
+
+___
+
+
+
+
+
+Traypublisher: Fix plugin order #5299
+
+Frame range collector for traypublisher was moved to traypublisher plugins and changed order to make sure `assetEntity` is filled in `instance.data`.
+
+
+___
+
+
+
+
+
+Deadline: removing OPENPYPE_VERSION from some host submitters #5302
+
+Removing the deprecated method of adding OPENPYPE_VERSION to the job environment. It was a leftover; other hosts have already been cleaned up.
+
+
+___
+
+
+
+
+
+AYON: Fix args for workfile conversion util #5308
+
+The workfile update conversion util function now has the right expected arguments.
+
+
+___
+
+
+
+### **🔀 Refactored code**
+
+
+
+Maya: Refactor imports to `lib.get_reference_node` since the other function… #5258
+
+Refactor imports to `lib.get_reference_node` since the other function is deprecated.
+
+
+___
+
+
+
+
+
+
+## [3.16.0](https://github.com/ynput/OpenPype/tree/3.16.0)
+
+
+[Full Changelog](https://github.com/ynput/OpenPype/compare/...3.16.0)
+
+### **🆕 New features**
+
+
+
+General: Reduce usage of legacy io #4723
+
+Replace usages of `legacy_io` with getter methods, or reuse already available information. Create plugins using CreateContext use the context from the CreateContext object. Loaders use getter functions from context tools. Publish plugins use information from instance.data or context.data. In some cases pieces of code were refactored a little, e.g. the fps getter in Maya.
+
+
+___
+
+
+
+
+
+Documentation: API docs reborn - yet again #4419
+
+## Feature
+
+Add functional base for API Documentation using Sphinx and AutoAPI.
+
+After the unsuccessful #2512, #834 and #210, this is yet another try, but this time without the ambition to solve the whole issue. This makes the Sphinx script work and nothing else. Any changes and improvements in API docs should be made in subsequent PRs.
+
+## How to use it
+
+You can run:
+
+```sh
+cd .\docs
+make.bat html
+```
+
+or
+
+```sh
+cd ./docs
+make html
+```
+
+This will go over our code and generate **.rst** files in `/docs/source/autoapi` and from those it will generate full html documentation in `/docs/build/html`.
+
+During the build you'll see tons of red errors that are pointing to our issues:
+
+1) **Wrong imports**
+   Invalid imports are usually wrong relative imports (too deep) or circular imports.
+
+2) **Invalid doc-strings**
+   Doc-strings to be processed into documentation need to follow some syntax - this can be checked by running
+ `pydocstyle` that is already included with OpenPype
+3) **Invalid markdown/rst files**
+ md/rst files can be included inside rst files using `.. include::` directive. But they have to be properly formatted.
+
+
+## Editing rst templates
+
+Everything starts with `/docs/source/index.rst` - this file should be properly edited. Right now it just includes `readme.rst`, which in turn includes and parses the main `README.md`. This is the entry point to the API documentation. All templates generated by AutoAPI are in `/docs/source/autoapi`. They should eventually be committed to the repository and edited too.
+
+## Steps for enhancing API documentation
+
+1) Run `/docs/make.bat html`
+2) Read the red errors/warnings - fix them in the code
+3) Run `/docs/make.bat html` again until there are no red lines
+4) Edit rst files and add some meaningful content there
+
+> **Note**
+> This can (should) be merged as is without doc-string fixes in the code or changes in templates. All additional improvements on API documentation should be made in new PRs.
+
+> **Warning**
+> You need to add new dependencies to use it. Run `create_venv`.
+
+Connected to #2490
+___
+
+
+
+
+
+Global: custom location for OP local versions #4673
+
+This provides a configurable location to unzip OpenPype version zips. By default it was hardcoded to the artist's app data folder, which might be problematic/slow with roaming profiles. The location must be accessible with write permissions by the user running OP Tray (so `Program Files` might be problematic).
+
+
+___
+
+
+
+
+
+AYON: Update settings conversion #4837
+
+Updated the conversion script of AYON settings to v3 settings. The PR is related to changes in the addons repository https://github.com/ynput/ayon-addons/pull/6. Changed how the conversion happens: the conversion output does not start with OpenPype defaults but with an empty dictionary.
+
+
+___
+
+
+
+
+
+AYON: Implement integrate links publish plugin #4842
+
+Implemented entity links get/create functions. Added new integrator which replaces v3 integrator for links.
+
+
+___
+
+
+
+
+
+General: Version attributes integration #4991
+
+Implemented a unified integrate plugin to update version attributes after all integrations for AYON. The goal is to be able to update attribute values on a version in a unified way when all addon integrators are done, so that e.g. ftrack can add its ftrack id to the matching version on the AYON server. The values can be stored under the `"versionAttributes"` key.
+
+
+___
+
+
+
+
+
+AYON: Staging versions can be used #4992
+
+Added ability to use staging versions in AYON mode.
+
+
+___
+
+
+
+
+
+AYON: Preparation for products #5038
+
+Prepare ayon settings conversion script for `product` settings conversion.
+
+
+___
+
+
+
+
+
+Loader: Hide inactive versions in UI #5101
+
+Added support for `active` argument to hide versions with active set to False in Loader UI when in AYON mode.
+
+
+___
+
+
+
+
+
+General: CLI addon command #5109
+
+Added `addon` alias for `module` in OpenPype cli commands.
+
+
+___
+
+
+
+
+
+AYON: OpenPype as server addon #5199
+
+OpenPype repository can be converted to AYON addon for distribution. Addon has defined dependencies that are required to use it and are not in base ayon-launcher (desktop application).
+
+
+___
+
+
+
+
+
+General: Runtime dependencies #5206
+
+Defined runtime dependencies in pyproject.toml. Moved the Python OCIO and OTIO modules there.
+
+
+___
+
+
+
+
+
+AYON: Bundle distribution #5209
+
+Since AYON server 0.3.0, addon versions are defined by bundles, which affects how addons, dependency packages and installers are handled. The only source of truth about which version of anything should be used is the server bundle.
+
+
+___
+
+
+
+
+
+Feature/blender handle q application #5264
+
+This changes the way the QApplication is run for Blender: the singleton (QApplication) is requested during register. This is done so that other Qt applications and addons are able to run in Blender. In the previous implementation, if a QApplication was already running, all OpenPype functionality became unavailable.
+
+
+___
+
+
+
+### **🚀 Enhancements**
+
+
+
+General: Connect to AYON server (base) #3924
+
+Initial implementation of being able use AYON server in current OpenPype client. Added ability to connect to AYON server and use base queries.
+
+ AYON mode has its own executable (and start script). To start in AYON mode, just replace `start.py` with `ayon_start.py` (a tray start script was added to tools). Added the constant `AYON_SERVER_ENABLED` to `openpype/__init__.py` to know if AYON mode is enabled; in that case Mongo is not used at all and any attempts will cause crashes. The content of `~/openpype/client` had to be modified to make this switch possible: the Mongo implementation was moved to a `mongo` subfolder, with "star imports" in the files the current imports come from. The logic of the tools and queries was not changed at all; since the functions were based on Mongo queries, they don't use the full potential of the AYON server's abilities. At the moment the implementation has a login UI, distribution of files from the server and replacement of the Mongo queries. The `ayon_api` module is used for the queries; it is in live development, so versions may change from day to day.
+
+
+___
+
+
+
+
+
+Enhancement kitsu note with exceptions #4537
+
+Adding a setting to choose some exceptions to IntegrateKitsuNote task status changes.
+
+
+___
+
+
+
+
+
+General: Environment variable for default OCIO configs #4670
+
+Define an environment variable which points to the root of the builtin OCIO configs, to be able to change the root without changing settings. The path in settings used `"{OPENPYPE_ROOT}/vendor/bin/ocioconfig/OpenColorIOConfig"`, which does not allow moving the root somewhere else. That will be needed in AYON, where configs won't be part of the desktop application but downloaded from the server.
+
+
+___
+
+
+
+
+
+AYON: Editorial hierarchy creation #4699
+
+Implemented the extract hierarchy to AYON plugin, which creates entities in AYON using the ayon api.
+
+
+___
+
+
+
+
+
+AYON: Vendorize ayon api #4753
+
+Vendorize the ayon api into the openpype vendor directory. The reason is that `ayon-python-api` is in live development and will fix/add features often in the next few weeks/months, and because updating a dependency requires a new release -> a new build, we want to avoid that, as it would affect OpenPype development.
+
+
+___
+
+
+
+
+
+General: Update PySide 6 for MacOs #4764
+
+The new version of PySide6 does not have issues with the settings UI. It still breaks UI stylesheets, so it is not changed for other platforms, but it is an enhancement over the previous state.
+
+
+___
+
+
+
+
+
+General: Removed unused cli commands #4902
+
+Removed `texturecopy` and `launch` cli commands from cli commands.
+
+
+___
+
+
+
+
+
+AYON: Linux & MacOS launch script #4970
+
+Added shell script to launch tray in AYON mode.
+
+
+___
+
+
+
+
+
+General: Qt scale enhancement #5059
+
+Set ~~'QT_SCALE_FACTOR_ROUNDING_POLICY'~~ the scale factor rounding policy of QApplication to `PassThrough`, so the scaling can be a 'float' number and not just an 'int' (150% -> 1.5 scale).
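+
+A minimal sketch using the Qt API (the policy must be set before the QApplication is created):
+```python
+from qtpy import QtCore, QtWidgets
+
+policy = QtCore.Qt.HighDpiScaleFactorRoundingPolicy.PassThrough
+QtWidgets.QApplication.setHighDpiScaleFactorRoundingPolicy(policy)
+app = QtWidgets.QApplication([])  # 150% OS scaling now maps to 1.5, not 2
+```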
+
+
+___
+
+
+
+
+
+CI: WPS linting instead of Hound (rebase) 2 #5115
+
+Because Hound, currently used to lint the code on GH, ships with really old flake8 support, it fails miserably on any newer Python syntax. This PR adds the WPS linter to GitHub workflows to step in.
+
+
+___
+
+
+
+
+
+Max: OP parameters only displays what is attached to the container #5229
+
+The OP parameter in 3dsmax now only displays what is currently attached to the container when deleting, while you can still see the items that are not yet added when adding to the container.
+
+
+___
+
+
+
+
+
+Testing: improving logging during testing #5271
+
+Unit test logging was crashing on more than one nested layer of inherited loggers.
+
+
+___
+
+
+
+
+
+Nuke: removing deprecated settings in baking #5275
+
+Removing deprecated settings for baking with reformat. This option was only for single reformat node and it had been substituted with multiple reposition nodes.
+
+
+___
+
+
+
+### **🐛 Bug fixes**
+
+
+
+AYON: General fixes and updates #4975
+
+A few smaller fixes related to the AYON connection. Some of the fixes were taken from this PR.
+
+
+___
+
+
+
+
+
+Start script: Change returncode on validate or list versions #4515
+
+Change exit code from `1` to `0` when versions are printed or when version is validated.
+
+ Return code `1` indicates an error, but no error actually happened.
+
+
+___
+
+
+
+
+
+AYON: Change login UI works #4754
+
+Fixed the change-login UI. The change-login UI did show up and a new login was successful, but after a restart the previous login was used. This change fixes the issue.
+
+
+___
+
+
+
+
+
+AYON: General issues #4763
+
+The vendorized `ayon_api` from the PR broke the OpenPype launch, because `ayon_api` was not available. Moved `ayon_api` from the AYON-specific subfolder to the `common` python vendor in OpenPype, and removed login in the AYON start script (which was invalid anyway). Also fixed compatibility with PySide6 by using `qtpy` instead of `Qt` and changing code which is not PySide6 compatible.
+
+
+___
+
+
+
+
+
+AYON: Small fixes #4841
+
+Bugfixes and enhancements related to AYON logic. Define the `BUILTIN_OCIO_ROOT` environment variable so OCIO configs work. Use constants from the ayon api instead of hardcoding them in the codebase. Change the process name from "openpype" to "ayon". Don't execute the login dialog when the application is not yet running; use the `open` method instead. Fixed missing module settings which were not taken from OpenPype defaults. Updated the ayon api to `0.1.17`.
+
+
+___
+
+
+
+
+
+Bugfix - Update gazu to 0.9.3 #4845
+
+This updates Gazu to 0.9.3 to make sure Gazu works with Kitsu and Zou 0.16.x+
+
+
+___
+
+
+
+
+
+Igniter: fix error reports in silent mode #4909
+
+Some errors in silent mode commands in Igniter were suppressed and not visible for example in Deadline log.
+
+
+___
+
+
+
+
+
+General: Remove ayon api from poetry lock #4964
+
+Remove AYON python api from pyproject.toml and poetry.lock again.
+
+
+___
+
+
+
+
+
+Ftrack: Fix AYON settings conversion #4967
+
+Fix conversion of ftrack settings in AYON mode.
+
+
+___
+
+
+
+
+
+AYON: ISO date format conversion issues #4981
+
+Function `datetime.fromisoformat` was replaced with `arrow.get`.
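+
+A minimal sketch of the swap (the input value is illustrative):
+```python
+import arrow
+
+value = "2023-07-18T10:30:00.123456+00:00"
+# arrow.get accepts ISO-8601 variants that datetime.fromisoformat rejects
+created_at = arrow.get(value).datetime
+```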
+
+
+___
+
+
+
+
+
+AYON: Missing files on representations #4989
+
+Fix integration of files into representation in server database.
+
+
+___
+
+
+
+
+
+General: Fix Python 2 vendor for arrow #4993
+
+Moved remaining dependencies for arrow from ftrack to python 2 vendor.
+
+
+___
+
+
+
+
+
+General: Fix new load plugins for next minor relase #5000
+
+Fix access to `fname` attribute which is not available on load plugin anymore.
+
+
+___
+
+
+
+
+
+General: Fix mongo secure connection #5031
+
+Fix `ssl` and `tls` keys checks in mongo uri query string.
+
+
+___
+
+
+
+
+
+AYON: Fix site sync settings #5069
+
+Fixed settings for AYON variant of sync server.
+
+
+___
+
+
+
+
+
+General: Replace deprecated keyword argument in PyMongo #5080
+
+Use argument `tlsCAFile` instead of `ssl_ca_certs` to avoid deprecation warnings.
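+
+A minimal sketch of the keyword swap (the connection string is illustrative):
+```python
+from pymongo import MongoClient
+
+client = MongoClient(
+    "mongodb://example-host:27017",
+    tls=True,
+    tlsCAFile="/path/to/ca.pem",  # replaces the deprecated ssl_ca_certs
+)
+```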
+
+
+___
+
+
+
+
+
+Igniter: QApplication is created #5081
+
+Function `_get_qt_app` now actually creates a new `QApplication` if one was not created yet.
+
+
+___
+
+
+
+
+
+General: Lower unidecode version #5090
+
+Use older version of Unidecode module to support Python 2.
+
+
+___
+
+
+
+
+
+General: Lower cryptography to 39.0.0 #5099
+
+Lower cryptography to 39.0.0 to avoid breaking of DCCs like Maya and Nuke.
+
+
+___
+
+
+
+
+
+AYON: Global environments key fix #5118
+
+When converting AYON settings to OP settings, the `environments` setting is put under the `environments` key in `general`; however, when populating the environment, the `environment` key gets picked up, which does not contain the environment variables from the `core/environments` setting.
+
+
+___
+
+
+
+
+
+Add collector to tray publisher for getting frame range data #5152
+
+Add a collector to the tray publisher to get frame range data. Users can choose to enable this collector if they need this in the publisher. Resolves #5136.
+
+
+___
+
+
+
+
+
+Unreal: get current project settings not using unreal project name #5170
+
+There was a bug where the Unreal project name was used to query project settings. But the Unreal project name can differ from the "real" one because of naming convention rules set by Unreal. This fixes it by asking for the current project settings.
+
+
+___
+
+
+
+
+
+Substance Painter: Fix Collect Texture Set Images unable to copy.deepcopy due to QMenu #5238
+
+Fix `copy.deepcopy` of `instance.data`.
+
+
+___
+
+
+
+
+
+Ayon: server returns different key #5251
+
+Package returned from server has `filename` instead of `name`.
+
+
+___
+
+
+
+
+
+Substance Painter: Fix default color management settings #5259
+
+The default color management settings for Substance Painter were invalid: they were set to override the global config by default but specified no valid config paths of their own, and thus errored that the paths were not correct. This sets the defaults correctly to match other hosts. _A quick check suggests this was the only host with wrong default settings._
+
+
+___
+
+
+
+
+
+Nuke: fixing container data if windows path in value #5267
+
+Windows paths in container data are reformatted. Previously it was reported that Nuke raised a `utf8 0xc0` error if backward slashes were in data values.
+
+
+___
+
+
+
+
+
+Houdini: fix typo error in collect arnold rop #5281
+
+Fixing a typo in `collect_arnold_rop.py`. Reference: #5280.
+
+
+___
+
+
+
+
+
+Slack - enhanced logging and protection against failure #5287
+
+Covered issues found in production at a customer site. A SlackAPI exception doesn't need to have 'error'; an uncaught exception is now covered.
+
+
+___
+
+
+
+
+
+Maya: Removed unnecessary import of pyblish.cli #5292
+
+This import resulted in an additional logging handler being added, which led to duplication of logs in hosts with plugins containing the `is_in_tests` method. The import is unnecessary for testing functionality.
+
+
+___
+
+
+
+### **🔀 Refactored code**
+
+
+
+Loader: Remove `context` argument from Loader.__init__() #4602
+
+Remove the previously required `context` argument.
+
+
+___
+
+
+
+
+
+Global: Remove legacy integrator #4786
+
+Remove the legacy integrator.
+
+
+___
+
+
+
+### **📃 Documentation**
+
+
+
+Next Minor Release #5291
+
+
+___
+
+
+
+### **Merged pull requests**
+
+
+
+Maya: Refactor to new publisher #4388
+
+**Refactor Maya to use the new publisher with new creators.**
+
+
+- [x] Legacy instance can be converted in UI using `SubsetConvertorPlugin`
+- [x] Fix support for old style "render" and "vrayscene" instance to the new per layer format.
+- [x] Context data is stored with scene
+- [x] Workfile instance converted to AutoCreator
+- [x] Converted Creator classes
+- [x] Create animation
+- [x] Create ass
+- [x] Create assembly
+- [x] Create camera
+- [x] Create layout
+- [x] Create look
+- [x] Create mayascene
+- [x] Create model
+- [x] Create multiverse look
+- [x] Create multiverse usd
+- [x] Create multiverse usd comp
+- [x] Create multiverse usd over
+- [x] Create pointcache
+- [x] Create proxy abc
+- [x] Create redshift proxy
+- [x] Create render
+- [x] Create rendersetup
+- [x] Create review
+- [x] Create rig
+- [x] Create setdress
+- [x] Create unreal skeletalmesh
+- [x] Create unreal staticmesh
+- [x] Create vrayproxy
+- [x] Create vrayscene
+- [x] Create xgen
+- [x] Create yeti cache
+- [x] Create yeti rig
+- [ ] Tested new Creator publishes
+- [x] Publish animation
+- [x] Publish ass
+- [x] Publish assembly
+- [x] Publish camera
+- [x] Publish layout
+- [x] Publish look
+- [x] Publish mayascene
+- [x] Publish model
+- [ ] Publish multiverse look
+- [ ] Publish multiverse usd
+- [ ] Publish multiverse usd comp
+- [ ] Publish multiverse usd over
+- [x] Publish pointcache
+- [x] Publish proxy abc
+- [x] Publish redshift proxy
+- [x] Publish render
+- [x] Publish rendersetup
+- [x] Publish review
+- [x] Publish rig
+- [x] Publish setdress
+- [x] Publish unreal skeletalmesh
+- [x] Publish unreal staticmesh
+- [x] Publish vrayproxy
+- [x] Publish vrayscene
+- [x] Publish xgen
+- [x] Publish yeti cache
+- [x] Publish yeti rig
+- [x] Publish workfile
+- [x] Rig loader correctly generates a new style animation creator instance
+- [ ] Validations / Error messages for common validation failures look nice and usable as a report.
+- [ ] Make Create Animation hidden to the user (should not create manually?)
+- [x] Correctly detect difference between **'creator_attributes'** and **'instance_data'** since both are "flattened" to the top node.
+
+
+___
+
+
+
+
+
+Start script: Fix possible issues with destination drive path #4478
+
+Drive paths for Windows now fix a possibly missing slash at the end of the destination path.
+
+ The Windows `subst` command requires the destination path to end with a slash if it is a drive (it should be `G:\` not `G:`).
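+
+A minimal sketch of the normalization (values illustrative):
+```python
+destination = "G:"
+if destination.endswith(":"):
+    destination += "\\"  # subst requires "G:\" rather than "G:"
+```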
+
+
+___
+
+
+
+
+
+Global: Move PyOpenColorIO to vendor/python #4946
+
+So that DCCs don't conflict with their own.
+
+See https://github.com/ynput/OpenPype/pull/4267#issuecomment-1537153263 for the issue with Gaffer.
+
+I'm not sure if this is the correct approach, but I assume PySide/Shiboken is under `vendor/python` for this reason as well...
+___
+
+
+
+
+
+RuntimeError with Click on deadline publish #5065
+
+I changed Click to version 8.0 instead of 7.1.2 to solve this error:
+```
+2023-05-30 16:16:51: 0: STDOUT: Traceback (most recent call last):
+2023-05-30 16:16:51: 0: STDOUT: File "start.py", line 1126, in boot
+2023-05-30 16:16:51: 0: STDOUT: File "/prod/softprod/apps/openpype/LINUX/3.15/dependencies/click/core.py", line 829, in __call__
+2023-05-30 16:16:51: 0: STDOUT: return self.main(*args, **kwargs)
+2023-05-30 16:16:51: 0: STDOUT: File "/prod/softprod/apps/openpype/LINUX/3.15/dependencies/click/core.py", line 760, in main
+2023-05-30 16:16:51: 0: STDOUT: _verify_python3_env()
+2023-05-30 16:16:51: 0: STDOUT: File "/prod/softprod/apps/openpype/LINUX/3.15/dependencies/click/_unicodefun.py", line 126, in _verify_python3_env
+2023-05-30 16:16:51: 0: STDOUT: raise RuntimeError(
+2023-05-30 16:16:51: 0: STDOUT: RuntimeError: Click will abort further execution because Python 3 was configured to use ASCII as encoding for the environment. Consult https://click.palletsprojects.com/python3/ for mitigation steps.
+```
+
+
+___
+
+
+
+
+
+
+## [3.15.12](https://github.com/ynput/OpenPype/tree/3.15.12)
+
+
+[Full Changelog](https://github.com/ynput/OpenPype/compare/3.15.11...3.15.12)
+
+### **🆕 New features**
+
+
+
+Tray Publisher: User can set colorspace per instance explicitly #4901
+
+With this feature a user can set/override the colorspace for the representations of an instance explicitly instead of relying on the File Rules from project settings or alike. This way you can ingest any file and explicitly say "this file is colorspace X".
+
+
+___
+
+
+
+
+
+Review Family in Max #5001
+
+Review feature by creating a preview animation in 3dsmax. (The code is still being cleaned up, so there will be some updates until it is ready for review.)
+
+
+___
+
+
+
+
+
+AfterEffects: support for workfile template builder #5163
+
+This PR adds templated workfile builder functionality. It allows preparing an AE workfile with placeholders for automatically loading a particular representation of a particular subset of a particular asset from the context where the workfile is opened. Selection from multiple prepared workfiles is provided through templates; a specific type of task could use a particular workfile template, etc. Artists can then build a workfile from the template when opening a new workfile.
+
+
+___
+
+
+
+
+
+CreatePlugin: Get next version helper #5242
+
+Implemented helper functions to get next available versions for create instances.
+
+
+___
+
+
+
+### **🚀 Enhancements**
+
+
+
+Maya: Improve Templates #4854
+
+Use library method for fetching reference node and support parent in hierarchy.
+
+
+___
+
+
+
+
+
+Bug: Maya - xgen sidecar files arent moved when saving workfile as an new asset workfile changing context - OP-6222 #5215
+
+This PR manages the Xgen files when switching context in the Workfiles app.
+
+
+___
+
+
+
+
+
+node references to check for duplicates in Max #5192
+
+No duplicates for node references in Max when users try to select nodes before publishing.
+
+
+___
+
+
+
+
+
+Tweak profiles logging to debug level #5194
+
+Tweak profiles logging to debug level since they aren't artist facing logs.
+
+
+___
+
+
+
+
+
+Enhancement: Reduce more visual clutter for artists in new publisher reports #5208
+
+Got this from one of our artists' reports - figured some of these logs were definitely not for the artist, reduced those logs to debug level.
+
+
+___
+
+
+
+
+
+Cosmetics: Tweak pyblish repair actions (icon, logs, docstring) #5213
+
+- Add icon to RepairContextAction
+- logs to debug level
+- also add attempt repair for RepairAction for consistency
+- fix RepairContextAction docstring to mention correct argument name
+
+#### Additional info
+
+We should not forget to remove this ["deprecated" actions.py file](https://github.com/ynput/OpenPype/blob/3501d0d23a78fbaef106da2fffe946cb49bef855/openpype/action.py) in 3.16 (next-minor)
+
+## Testing notes:
+
+1. Run some fabulous repairs!
+
+___
+
+
+
+
+
+Maya: fix save file prompt on launch last workfile with color management enabled + restructure `set_colorspace` #5225
+
+- Only set `configFilePath` when OCIO env var is not set since it doesn't do anything if OCIO var is set anyway.
+- Set the Maya 2022+ default OCIO path using the resources path instead of "" to avoid Maya Save File on new file after launch
+- **Bugfix: This is what fixes the Save prompt on open last workfile feature with Global color management enabled**
+- Move all code related to applying the maya settings together after querying the settings
+- Swap around the `if use_workfile_settings` since the check was reversed
+- Use `get_current_project_name()` instead of environment vars
+
+
+___
+
+
+
+
+
+Enhancement: More descriptive error messages for Loaders #5227
+
+Tweak raised errors and error messages for loader errors.
+
+
+___
+
+
+
+
+
+Houdini: add select invalid action for ValidateSopOutputNode #5231
+
+This PR adds a `SelectROPAction` action to `houdini\api\action.py`, which is used in `Validate Output Node`. `SelectROPAction` selects the ROPs associated with the errored instances.
+
+
+___
+
+
+
+
+
+Remove new lines from the delivery template string #5235
+
+If the delivery template has a newline symbol at the end, say it was copied from a text editor, the delivery process fails with an `OSError` due to an incorrect destination path. To avoid that, `rstrip()` was added to the `delivery_path` processing.
+
+
+___
+
+
+
+
+
+Houdini: better selection on pointcache creation #5250
+
+Houdini allows an `ObjNode` path as `sop_path` in the ROP, unlike OP/AYON, which require `sop_path` to be set explicitly to a SOP node path. In this code, better selection is used to filter out invalid selections from the OP/AYON point of view. Valid selections are:
+- `SopNode` that has a parent of type `geo` or `subnet`
+- `ObjNode` of type `geo` that has:
+  - a `SopNode` of type `output`
+  - a `SopNode` with its render flag on (if there is no `SopNode` of type `output`)
+
+This effectively filters out:
+- empty `ObjNode`(s)
+- `ObjNode`(s) of other types like `cam` and `dopnet`
+- `SopNode`(s) whose parents are of other types like `cam` and `sop solver`
+
+
+___
+
+
+
+
+
+Update scene inventory even if any errors occurred during update #5252
+
+When selecting many items in the scene inventory to update versions, updating stops if one of the items errors out. However, before this PR the scene inventory would also NOT refresh, making you think it did nothing. Also implemented as a method to allow some code deduplication.
+
+
+___
+
+
+
+### **🐛 Bug fixes**
+
+
+
+Maya: Convert frame values to integers #5188
+
+Convert frame values to integers.
+
+
+___
+
+
+
+
+
+Maya: fix the register_event_callback correctly collecting workfile save after #5214
+
+Fixing the bug of `register_event_callback` not being able to collect the "workfile_save_after" action for the lock file action.
+
+
+___
+
+
+
+
+
+Maya: aligning default settings to distributed aces 1.2 config #5233
+
+Maya colorspace settings defaults are set so they align with our distributed ACES 1.2 config file set in the global colorspace configs.
+
+
+___
+
+
+
+
+
+RepairAction and SelectInvalidAction filter instances failed on the exact plugin #5240
+
+RepairAction and SelectInvalidAction actually filter to instances that failed on the exact plugin - not on "any failure"
+
+
+___
+
+
+
+
+
+Maya: Bugfix look update nodes by id with non-unique shape names (query with `fullPath`) #5257
+
+Fixes a bug in updating attributes on nodes with an assigned shader when the shape name existed more than once in the scene, due to the `cmds.listRelatives` call not being done with the `fullPath=True` flag. Original error:
+```python
+# Traceback (most recent call last):
+# File "E:\openpype\OpenPype\openpype\tools\sceneinventory\view.py", line 264, in
+# lambda: self._show_version_dialog(items))
+# File "E:\openpype\OpenPype\openpype\tools\sceneinventory\view.py", line 722, in _show_version_dialog
+# self._update_containers(items, version)
+# File "E:\openpype\OpenPype\openpype\tools\sceneinventory\view.py", line 849, in _update_containers
+# update_container(item, item_version)
+# File "E:\openpype\OpenPype\openpype\pipeline\load\utils.py", line 502, in update_container
+# return loader.update(container, new_representation)
+# File "E:\openpype\OpenPype\openpype\hosts\maya\plugins\load\load_look.py", line 119, in update
+# nodes_by_id[lib.get_id(n)].append(n)
+# File "E:\openpype\OpenPype\openpype\hosts\maya\api\lib.py", line 1420, in get_id
+# sel.add(node)
+```
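+
+A minimal sketch of the fix (the call site and node path are illustrative):
+```python
+from maya import cmds
+
+node = "|group1|pSphere1_GEO"  # illustrative
+# fullPath=True keeps shapes unambiguous when a short name exists twice
+shapes = cmds.listRelatives(node, shapes=True, fullPath=True) or []
+```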
+
+
+___
+
+
+
+
+
+Nuke: Create nodes with inpanel=False #5051
+
+This PR removes the annoyance of the UI changing focus to the properties window, only for the property panel of the newly created node to disappear. Instead of using `node.hideControlPanel`, the concealment is implemented during the creation of the node, which does not change the focus of the current window.
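+
+A minimal sketch using the Nuke API:
+```python
+import nuke
+
+# create the node without opening its properties panel, keeping UI focus
+node = nuke.createNode("Write", inpanel=False)
+```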
+___
+
+
+
+
+
+Fix the reset frame range not setting up the right timeline in Max #5187
+
+Resolve #5181
+
+
+___
+
+
+
+
+
+Resolve: after launch automatization fixes #5193
+
+The workfile is now correctly created and aligned with the actual project. The launching mechanism is also fixed, so even if no workfile has been saved yet, it will open the OpenPype menu automatically.
+
+
+___
+
+
+
+
+
+General: Revert backward incompatible change of path to template to multiplatform #5197
+
+Multi-platform support is still handled by using `work[root]` (or any other root that is accessible across platforms).
+
+
+___
+
+
+
+
+
+Nuke: root set format updating in node graph #5198
+
+The Nuke root node needs some values reset so that knobs update in the node graph. This works the same way as when a user changes the frame number, so expressions update their values in knobs.
+
+
+___
+
+
+
+
+
+Hiero: fixing otio current project and cosmetics #5200
+
+OTIO was not returning the correct current project once an additional Untitled project was open in the project manager stack.
+
+
+___
+
+
+
+
+
+Max: Publisher instances dont hold its enabled disabled states when Publisher reopened again #5202
+
+Resolves #5183, a general MAXScript-to-Python conversion issue (e.g. bool conversion: true in MAXScript vs. True in Python). Also resolves the ValueError when you change the subset to publish in the list view menu.
+
+
+___
+
+
+
+
+
+Burnins: Filter script is defined only for video streams #5205
+
+Burnins are working for inputs with audio.
+
+
+___
+
+
+
+
+
+Colorspace lib fix compatible python version comparison #5212
+
+Fix python version comparison.
+
+
+___
+
+
+
+
+
+Houdini: Fix `get_color_management_preferences` #5217
+
+Fix the issue described here where the logic for retrieving the current OCIO display and view was incorrectly trying to apply a regex to it.
+
+
+___
+
+
+
+
+
+Houdini: Redshift ROP image format bug #5218
+
+Problem:
+The "RS_outputFileFormat" parm value was missing, and there were more "image_format" options than the Redshift ROP supports.
+
+Fix:
+1) Removed unnecessary formats from `image_format_enum`.
+2) Added the selected format value to `RS_outputFileFormat`.
+___
+
+
+
+
+
+Colorspace: check PyOpenColorIO rather then python version #5223
+
+Fixing the previously merged PR (https://github.com/ynput/OpenPype/pull/5212) and applying a better way to check compatibility with the PyOpenColorIO Python API.
+
+
+___
+
+
+
+
+
+Validate delivery action representations status #5228
+
+- disable delivery button if no representations checked
+- fix macos combobox layout
+- add error message if no delivery templates found
+
+
+___
+
+
+
+
+
+ Houdini: Add geometry check for pointcache family #5230
+
+When `sop_path` on the ABC ROP node points to a non-`SopNode`, the validators `validate_abc_primitive_to_detail.py` and `validate_primitive_hierarchy_paths.py` error and crash when this line is executed: `geo = output_node.geometryAtFrame(frame)`
+
+
+___
+
+
+
+
+
+Houdini: Add geometry check for VDB family #5232
+
+When `sop_path` on the Geometry ROP node points to a non-`SopNode`, the validator `validate_vdb_output_node.py` errors and crashes when this line is executed: `sop_node.geometryAtFrame(frame)`
+
+
+___
+
+
+
+
+
+Substance Painter: Include the setting only in publish tab #5234
+
+Instead of having two settings in both the create and publish tabs, there is solely one setting in the publish tab for users to set up the parameters. Resolves #5172.
+
+
+___
+
+
+
+
+
+Maya: Fix collecting arnold prefix when none #5243
+
+When no prefix is specified in render settings, the renderlayer collector would error.
+
+
+___
+
+
+
+
+
+Deadline: OPENPYPE_VERSION should only be added when running from build #5244
+
+When running from source, the environment variable `OPENPYPE_VERSION` should not be added. This is a bugfix for feature #4489.
+
+
+___
+
+
+
+
+
+Fix no prompt for "unsaved changes" showing when opening workfile in Houdini #5246
+
+Fix no prompt for "unsaved changes" showing when opening workfile in Houdini.
+
+
+___
+
+
+
+
+
+Fix no prompt for "unsaved changes" showing when opening workfile in Substance Painter #5248
+
+Fix no prompt for "unsaved changes" showing when opening workfile in Substance Painter.
+
+
+___
+
+
+
+
+
+General: add the os library before os.environ.get #5249
+
+Adding the `os` import to `creator_plugins.py`, needed for the `os.environ.get` call on line 667.
+
+
+___
+
+
+
+
+
+Maya: Fix set_attribute for enum attributes #5261
+
+Fix for #5260
+
+
+___
+
+
+
+
+
+Unreal: Move Qt imports away from module init #5268
+
+Importing `Window` creates errors in headless mode.
+```
+*** WRN: >>> { ModulesLoader }: [ FAILED to import host folder unreal ]
+=============================
+No Qt bindings could be found
+=============================
+Traceback (most recent call last):
+ File "C:\Users\tokejepsen\OpenPype\.venv\lib\site-packages\qtpy\__init__.py", line 252, in
+ from PySide6 import __version__ as PYSIDE_VERSION # analysis:ignore
+ModuleNotFoundError: No module named 'PySide6'
+
+During handling of the above exception, another exception occurred:
+
+Traceback (most recent call last):
+ File "C:\Users\tokejepsen\OpenPype\openpype\modules\base.py", line 385, in _load_modules
+ default_module = __import__(
+ File "C:\Users\tokejepsen\OpenPype\openpype\hosts\unreal\__init__.py", line 1, in
+ from .addon import UnrealAddon
+ File "C:\Users\tokejepsen\OpenPype\openpype\hosts\unreal\addon.py", line 4, in
+ from openpype.widgets.message_window import Window
+ File "C:\Users\tokejepsen\OpenPype\openpype\widgets\__init__.py", line 1, in
+ from .password_dialog import PasswordDialog
+ File "C:\Users\tokejepsen\OpenPype\openpype\widgets\password_dialog.py", line 1, in
+ from qtpy import QtWidgets, QtCore, QtGui
+ File "C:\Users\tokejepsen\OpenPype\.venv\lib\site-packages\qtpy\__init__.py", line 259, in
+    raise QtBindingsNotFoundError()
+qtpy.QtBindingsNotFoundError: No Qt bindings could be found
+```
+
+
+___
+
+
+
+### **🔀 Refactored code**
+
+
+
+Maya: Minor refactoring and code cleanup #5226
+
+Some small cleanup and refactoring of logic: removing old comments, unused imports and some minor optimizations. Also removed the prints of the loader names of each container in the scene in `fix_incompatible_containers`, optimizing by using a `set` and defining it only once. Moved some UI-related code/tweaks to run in `on_init` only if not in headless mode. Removed an empty `obj.py` file. Each commit message roughly describes why the change was made.
+
+
+___
+
+
+
+### **Merged pull requests**
+
+
+
+Bug: Template builder fails when loading data without outliner representation #5222
+
+Added assertion handling for the case where the container does not have a representation in the outliner.
+
+
+___
+
+
+
+
+
+AfterEffects - add container check validator to AE settings #5203
+
+Adds a check that the scene contains only the latest versions of loaded containers.
+
+
+___
+
+
+
+
+
+
## [3.15.11](https://github.com/ynput/OpenPype/tree/3.15.11)
@@ -1970,7 +6911,7 @@ ___
Maya Load References - Add Display Handle Setting #4904
-When we load a reference in Maya using OpenPype loader, display handle is checked by default and prevent us to select easily the object in the viewport. I understand that some productions like to keep this option, so I propose to add display handle to the reference loader settings.
+When we load a reference in Maya using OpenPype loader, display handle is checked by default and prevent us to select easily the object in the viewport. I understand that some productions like to keep this option, so I propose to add display handle to the reference loader settings.
___
@@ -2078,7 +7019,7 @@ ___
Patchelf version locked #4853
-For Centos dockerfile it is necessary to lock the patchelf version to the older, otherwise the build process fails.
+For Centos dockerfile it is necessary to lock the patchelf version to the older, otherwise the build process fails.
___
diff --git a/Dockerfile.centos7 b/Dockerfile.centos7
index ce1a624a4f..ab1d3f8253 100644
--- a/Dockerfile.centos7
+++ b/Dockerfile.centos7
@@ -32,12 +32,16 @@ RUN yum -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.n
wget \
gcc \
zlib-devel \
+ pcre-devel \
+ perl-core \
bzip2 \
bzip2-devel \
readline-devel \
sqlite sqlite-devel \
openssl-devel \
openssl-libs \
+ openssl11-devel \
+ openssl11-libs \
tk-devel libffi-devel \
patchelf \
automake \
@@ -71,7 +75,12 @@ RUN echo 'export PATH="$HOME/.pyenv/bin:$PATH"'>> $HOME/.bashrc \
&& echo 'eval "$(pyenv init -)"' >> $HOME/.bashrc \
&& echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/.bashrc \
&& echo 'eval "$(pyenv init --path)"' >> $HOME/.bashrc
-RUN source $HOME/.bashrc && pyenv install ${OPENPYPE_PYTHON_VERSION}
+RUN source $HOME/.bashrc \
+ && export CPPFLAGS="-I/usr/include/openssl11" \
+ && export LDFLAGS="-L/usr/lib64/openssl11 -lssl -lcrypto" \
+ && export PATH=/usr/local/openssl/bin:$PATH \
+ && export LD_LIBRARY_PATH=/usr/local/openssl/lib:$LD_LIBRARY_PATH \
+ && pyenv install ${OPENPYPE_PYTHON_VERSION}
COPY . /opt/openpype/
RUN rm -rf /openpype/.poetry || echo "No Poetry installed yet."
@@ -93,12 +102,15 @@ RUN source $HOME/.bashrc \
RUN source $HOME/.bashrc \
&& ./tools/fetch_thirdparty_libs.sh
+RUN echo 'export PYTHONPATH="/opt/openpype/vendor/python:$PYTHONPATH"'>> $HOME/.bashrc
RUN source $HOME/.bashrc \
&& bash ./tools/build.sh
RUN cp /usr/lib64/libffi* ./build/exe.linux-x86_64-3.9/lib \
- && cp /usr/lib64/libssl* ./build/exe.linux-x86_64-3.9/lib \
- && cp /usr/lib64/libcrypto* ./build/exe.linux-x86_64-3.9/lib \
+ && cp /usr/lib64/openssl11/libssl* ./build/exe.linux-x86_64-3.9/lib \
+ && cp /usr/lib64/openssl11/libcrypto* ./build/exe.linux-x86_64-3.9/lib \
+ && ln -sr ./build/exe.linux-x86_64-3.9/lib/libssl.so ./build/exe.linux-x86_64-3.9/lib/libssl.1.1.so \
+ && ln -sr ./build/exe.linux-x86_64-3.9/lib/libcrypto.so ./build/exe.linux-x86_64-3.9/lib/libcrypto.1.1.so \
&& cp /root/.pyenv/versions/${OPENPYPE_PYTHON_VERSION}/lib/libpython* ./build/exe.linux-x86_64-3.9/lib \
&& cp /usr/lib64/libxcb* ./build/exe.linux-x86_64-3.9/vendor/python/PySide2/Qt/lib
diff --git a/README.md b/README.md
index 8757e3db92..ce98f845e6 100644
--- a/README.md
+++ b/README.md
@@ -3,7 +3,7 @@
[](#contributors-)
OpenPype
-====
+========
[](https://github.com/pypeclub/pype/actions/workflows/documentation.yml) 
@@ -47,7 +47,7 @@ It can be built and ran on all common platforms. We develop and test on the foll
For more details on requirements visit [requirements documentation](https://openpype.io/docs/dev_requirements)
Building OpenPype
--------------
+-----------------
To build OpenPype you currently need [Python 3.9](https://www.python.org/downloads/) as we are following
[vfx platform](https://vfxplatform.com). Because some Linux distros come with a newer Python version
@@ -62,14 +62,14 @@ development tools like [CMake](https://cmake.org/) and [Visual Studio](https://v
#### Clone repository:
```sh
-git clone --recurse-submodules git@github.com:Pypeclub/OpenPype.git
+git clone --recurse-submodules git@github.com:ynput/OpenPype.git
```
#### To build OpenPype:
-1) Run `.\tools\create_env.ps1` to create virtual environment in `.\venv`
+1) Run `.\tools\create_env.ps1` to create virtual environment in `.\venv`.
2) Run `.\tools\fetch_thirdparty_libs.ps1` to download third-party dependencies like ffmpeg and oiio. Those will be included in build.
-3) Run `.\tools\build.ps1` to build OpenPype executables in `.\build\`
+3) Run `.\tools\build.ps1` to build OpenPype executables in `.\build\`.
To create distributable OpenPype versions, run `./tools/create_zip.ps1` - that will
create a zip file named `openpype-vx.x.x.zip`, with the version parsed from the current OpenPype repository, and
@@ -88,38 +88,38 @@ some OpenPype dependencies like [CMake](https://cmake.org/) and **XCode Command
An easy way to install everything necessary is to use [Homebrew](https://brew.sh):
1) Install **Homebrew**:
-```sh
-/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
-```
+ ```sh
+ /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
+ ```
2) Install **cmake**:
-```sh
-brew install cmake
-```
+ ```sh
+ brew install cmake
+ ```
3) Install [pyenv](https://github.com/pyenv/pyenv):
-```sh
-brew install pyenv
-echo 'eval "$(pyenv init -)"' >> ~/.zshrc
-pyenv init
-exec "$SHELL"
-PATH=$(pyenv root)/shims:$PATH
-```
+ ```sh
+ brew install pyenv
+ echo 'eval "$(pyenv init -)"' >> ~/.zshrc
+ pyenv init
+ exec "$SHELL"
+ PATH=$(pyenv root)/shims:$PATH
+ ```
-4) Pull in required Python version 3.9.x
-```sh
-# install Python build dependences
-brew install openssl readline sqlite3 xz zlib
+4) Pull in required Python version 3.9.x:
+ ```sh
+ # install Python build dependences
+ brew install openssl readline sqlite3 xz zlib
-# replace with up-to-date 3.9.x version
-pyenv install 3.9.6
-```
+ # replace with up-to-date 3.9.x version
+ pyenv install 3.9.6
+ ```
-5) Set local Python version
-```sh
-# switch to OpenPype source directory
-pyenv local 3.9.6
-```
+5) Set local Python version:
+ ```sh
+ # switch to OpenPype source directory
+ pyenv local 3.9.6
+ ```
#### To build OpenPype:
@@ -144,6 +144,10 @@ sudo ./tools/docker_build.sh centos7
If all is successful, you'll find the built OpenPype in the `./build/` folder.
+The Docker build can also be started from a Windows machine; just use `./tools/docker_build.ps1` instead of the shell script.
+
+This can even be used to create a Linux build (with the `centos7` or `debian` argument).
+
#### Manual build
You will need [Python >= 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll also need [curl](https://curl.se) on systems that don't have it preinstalled.
@@ -241,7 +245,7 @@ pyenv local 3.9.6
Running OpenPype
-------------
+----------------
OpenPype can be executed either from live sources (this repository) or from
*"frozen code"* - executables that can be built using the steps described above.
@@ -289,7 +293,7 @@ To run tests, execute `.\tools\run_tests(.ps1|.sh)`.
Developer tools
--------------
+---------------
If you wish to add your own tools to the `.\tools` folder without git tracking, you can do so by adding them with the `dev_*` prefix (example: `dev_clear_pyc(.ps1|.sh)`).
diff --git a/common/openpype_common/distribution/README.md b/common/openpype_common/distribution/README.md
deleted file mode 100644
index 212eb267b8..0000000000
--- a/common/openpype_common/distribution/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-Addon distribution tool
-------------------------
-
-Code in this folder is backend portion of Addon distribution logic for v4 server.
-
-Each host, module will be separate Addon in the future. Each v4 server could run different set of Addons.
-
-Client (running on artist machine) will in the first step ask v4 for list of enabled addons.
-(It expects list of json documents matching to `addon_distribution.py:AddonInfo` object.)
-Next it will compare presence of enabled addon version in local folder. In the case of missing version of
-an addon, client will use information in the addon to download (from http/shared local disk/git) zip file
-and unzip it.
-
-Required part of addon distribution will be sharing of dependencies (python libraries, utilities) which is not part of this folder.
-
-Location of this folder might change in the future as it will be required for a clint to add this folder to sys.path reliably.
-
-This code needs to be independent on Openpype code as much as possible!
\ No newline at end of file
diff --git a/common/openpype_common/distribution/addon_distribution.py b/common/openpype_common/distribution/addon_distribution.py
deleted file mode 100644
index 5e48639dec..0000000000
--- a/common/openpype_common/distribution/addon_distribution.py
+++ /dev/null
@@ -1,208 +0,0 @@
-import os
-from enum import Enum
-from abc import abstractmethod
-import attr
-import logging
-import requests
-import platform
-import shutil
-
-from .file_handler import RemoteFileHandler
-from .addon_info import AddonInfo
-
-
-class UpdateState(Enum):
- EXISTS = "exists"
- UPDATED = "updated"
- FAILED = "failed"
-
-
-class AddonDownloader:
- log = logging.getLogger(__name__)
-
- def __init__(self):
- self._downloaders = {}
-
- def register_format(self, downloader_type, downloader):
- self._downloaders[downloader_type.value] = downloader
-
- def get_downloader(self, downloader_type):
- downloader = self._downloaders.get(downloader_type)
- if not downloader:
- raise ValueError(f"{downloader_type} not implemented")
- return downloader()
-
- @classmethod
- @abstractmethod
- def download(cls, source, destination):
- """Returns url to downloaded addon zip file.
-
- Args:
- source (dict): {type:"http", "url":"https://} ...}
- destination (str): local folder to unzip
- Returns:
- (str) local path to addon zip file
- """
- pass
-
- @classmethod
- def check_hash(cls, addon_path, addon_hash):
- """Compares 'hash' of downloaded 'addon_url' file.
-
- Args:
- addon_path (str): local path to addon zip file
- addon_hash (str): sha256 hash of zip file
- Raises:
- ValueError if hashes doesn't match
- """
- if not os.path.exists(addon_path):
- raise ValueError(f"{addon_path} doesn't exist.")
- if not RemoteFileHandler.check_integrity(addon_path,
- addon_hash,
- hash_type="sha256"):
- raise ValueError(f"{addon_path} doesn't match expected hash.")
-
- @classmethod
- def unzip(cls, addon_zip_path, destination):
- """Unzips local 'addon_zip_path' to 'destination'.
-
- Args:
- addon_zip_path (str): local path to addon zip file
- destination (str): local folder to unzip
- """
- RemoteFileHandler.unzip(addon_zip_path, destination)
- os.remove(addon_zip_path)
-
- @classmethod
- def remove(cls, addon_url):
- pass
-
-
-class OSAddonDownloader(AddonDownloader):
-
- @classmethod
- def download(cls, source, destination):
- # OS doesnt need to download, unzip directly
- addon_url = source["path"].get(platform.system().lower())
- if not os.path.exists(addon_url):
- raise ValueError("{} is not accessible".format(addon_url))
- return addon_url
-
-
-class HTTPAddonDownloader(AddonDownloader):
- CHUNK_SIZE = 100000
-
- @classmethod
- def download(cls, source, destination):
- source_url = source["url"]
- cls.log.debug(f"Downloading {source_url} to {destination}")
- file_name = os.path.basename(destination)
- _, ext = os.path.splitext(file_name)
- if (ext.replace(".", '') not
- in set(RemoteFileHandler.IMPLEMENTED_ZIP_FORMATS)):
- file_name += ".zip"
- RemoteFileHandler.download_url(source_url,
- destination,
- filename=file_name)
-
- return os.path.join(destination, file_name)
-
-
-def get_addons_info(server_endpoint):
- """Returns list of addon information from Server"""
- # TODO temp
- # addon_info = AddonInfo(
- # **{"name": "openpype_slack",
- # "version": "1.0.0",
- # "addon_url": "c:/projects/openpype_slack_1.0.0.zip",
- # "type": UrlType.FILESYSTEM,
- # "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"}) # noqa
- #
- # http_addon = AddonInfo(
- # **{"name": "openpype_slack",
- # "version": "1.0.0",
- # "addon_url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing", # noqa
- # "type": UrlType.HTTP,
- # "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658"}) # noqa
-
- response = requests.get(server_endpoint)
- if not response.ok:
- raise Exception(response.text)
-
- addons_info = []
- for addon in response.json():
- addons_info.append(AddonInfo(**addon))
- return addons_info
-
-
-def update_addon_state(addon_infos, destination_folder, factory,
- log=None):
- """Loops through all 'addon_infos', compares local version, unzips.
-
- Loops through server provided list of dictionaries with information about
- available addons. Looks if each addon is already present and deployed.
- If isn't, addon zip gets downloaded and unzipped into 'destination_folder'.
- Args:
- addon_infos (list of AddonInfo)
- destination_folder (str): local path
- factory (AddonDownloader): factory to get appropriate downloader per
- addon type
- log (logging.Logger)
- Returns:
- (dict): {"addon_full_name": UpdateState.value
- (eg. "exists"|"updated"|"failed")
- """
- if not log:
- log = logging.getLogger(__name__)
-
- download_states = {}
- for addon in addon_infos:
- full_name = "{}_{}".format(addon.name, addon.version)
- addon_dest = os.path.join(destination_folder, full_name)
-
- if os.path.isdir(addon_dest):
- log.debug(f"Addon version folder {addon_dest} already exists.")
- download_states[full_name] = UpdateState.EXISTS.value
- continue
-
- for source in addon.sources:
- download_states[full_name] = UpdateState.FAILED.value
- try:
- downloader = factory.get_downloader(source.type)
- zip_file_path = downloader.download(attr.asdict(source),
- addon_dest)
- downloader.check_hash(zip_file_path, addon.hash)
- downloader.unzip(zip_file_path, addon_dest)
- download_states[full_name] = UpdateState.UPDATED.value
- break
- except Exception:
- log.warning(f"Error happened during updating {addon.name}",
- exc_info=True)
- if os.path.isdir(addon_dest):
- log.debug(f"Cleaning {addon_dest}")
- shutil.rmtree(addon_dest)
-
- return download_states
-
-
-def check_addons(server_endpoint, addon_folder, downloaders):
- """Main entry point to compare existing addons with those on server.
-
- Args:
- server_endpoint (str): url to v4 server endpoint
- addon_folder (str): local dir path for addons
- downloaders (AddonDownloader): factory of downloaders
-
- Raises:
- (RuntimeError) if any addon failed update
- """
- addons_info = get_addons_info(server_endpoint)
- result = update_addon_state(addons_info,
- addon_folder,
- downloaders)
- if UpdateState.FAILED.value in result.values():
- raise RuntimeError(f"Unable to update some addons {result}")
-
-
-def cli(*args):
- raise NotImplementedError
diff --git a/common/openpype_common/distribution/addon_info.py b/common/openpype_common/distribution/addon_info.py
deleted file mode 100644
index 00ece11f3b..0000000000
--- a/common/openpype_common/distribution/addon_info.py
+++ /dev/null
@@ -1,80 +0,0 @@
-import attr
-from enum import Enum
-
-
-class UrlType(Enum):
- HTTP = "http"
- GIT = "git"
- FILESYSTEM = "filesystem"
-
-
-@attr.s
-class MultiPlatformPath(object):
- windows = attr.ib(default=None)
- linux = attr.ib(default=None)
- darwin = attr.ib(default=None)
-
-
-@attr.s
-class AddonSource(object):
- type = attr.ib()
-
-
-@attr.s
-class LocalAddonSource(AddonSource):
- path = attr.ib(default=attr.Factory(MultiPlatformPath))
-
-
-@attr.s
-class WebAddonSource(AddonSource):
- url = attr.ib(default=None)
-
-
-@attr.s
-class VersionData(object):
- version_data = attr.ib(default=None)
-
-
-@attr.s
-class AddonInfo(object):
- """Object matching json payload from Server"""
- name = attr.ib()
- version = attr.ib()
- title = attr.ib(default=None)
- sources = attr.ib(default=attr.Factory(dict))
- hash = attr.ib(default=None)
- description = attr.ib(default=None)
- license = attr.ib(default=None)
- authors = attr.ib(default=None)
-
- @classmethod
- def from_dict(cls, data):
- sources = []
-
- production_version = data.get("productionVersion")
- if not production_version:
- return
-
- # server payload contains info about all versions
- # active addon must have 'productionVersion' and matching version info
- version_data = data.get("versions", {})[production_version]
-
- for source in version_data.get("clientSourceInfo", []):
- if source.get("type") == UrlType.FILESYSTEM.value:
- source_addon = LocalAddonSource(type=source["type"],
- path=source["path"])
- if source.get("type") == UrlType.HTTP.value:
- source_addon = WebAddonSource(type=source["type"],
- url=source["url"])
-
- sources.append(source_addon)
-
- return cls(name=data.get("name"),
- version=production_version,
- sources=sources,
- hash=data.get("hash"),
- description=data.get("description"),
- title=data.get("title"),
- license=data.get("license"),
- authors=data.get("authors"))
-
diff --git a/common/openpype_common/distribution/tests/test_addon_distributtion.py b/common/openpype_common/distribution/tests/test_addon_distributtion.py
deleted file mode 100644
index 765ea0596a..0000000000
--- a/common/openpype_common/distribution/tests/test_addon_distributtion.py
+++ /dev/null
@@ -1,167 +0,0 @@
-import pytest
-import attr
-import tempfile
-
-from common.openpype_common.distribution.addon_distribution import (
- AddonDownloader,
- OSAddonDownloader,
- HTTPAddonDownloader,
- AddonInfo,
- update_addon_state,
- UpdateState
-)
-from common.openpype_common.distribution.addon_info import UrlType
-
-
-@pytest.fixture
-def addon_downloader():
- addon_downloader = AddonDownloader()
- addon_downloader.register_format(UrlType.FILESYSTEM, OSAddonDownloader)
- addon_downloader.register_format(UrlType.HTTP, HTTPAddonDownloader)
-
- yield addon_downloader
-
-
-@pytest.fixture
-def http_downloader(addon_downloader):
- yield addon_downloader.get_downloader(UrlType.HTTP.value)
-
-
-@pytest.fixture
-def temp_folder():
- yield tempfile.mkdtemp()
-
-
-@pytest.fixture
-def sample_addon_info():
- addon_info = {
- "versions": {
- "1.0.0": {
- "clientPyproject": {
- "tool": {
- "poetry": {
- "dependencies": {
- "nxtools": "^1.6",
- "orjson": "^3.6.7",
- "typer": "^0.4.1",
- "email-validator": "^1.1.3",
- "python": "^3.10",
- "fastapi": "^0.73.0"
- }
- }
- }
- },
- "hasSettings": True,
- "clientSourceInfo": [
- {
- "type": "http",
- "url": "https://drive.google.com/file/d/1TcuV8c2OV8CcbPeWi7lxOdqWsEqQNPYy/view?usp=sharing" # noqa
- },
- {
- "type": "filesystem",
- "path": {
- "windows": ["P:/sources/some_file.zip",
- "W:/sources/some_file.zip"], # noqa
- "linux": ["/mnt/srv/sources/some_file.zip"],
- "darwin": ["/Volumes/srv/sources/some_file.zip"]
- }
- }
- ],
- "frontendScopes": {
- "project": {
- "sidebar": "hierarchy"
- }
- }
- }
- },
- "description": "",
- "title": "Slack addon",
- "name": "openpype_slack",
- "productionVersion": "1.0.0",
- "hash": "4be25eb6215e91e5894d3c5475aeb1e379d081d3f5b43b4ee15b0891cf5f5658" # noqa
- }
- yield addon_info
-
-
-def test_register(printer):
- addon_downloader = AddonDownloader()
-
- assert len(addon_downloader._downloaders) == 0, "Contains registered"
-
- addon_downloader.register_format(UrlType.FILESYSTEM, OSAddonDownloader)
- assert len(addon_downloader._downloaders) == 1, "Should contain one"
-
-
-def test_get_downloader(printer, addon_downloader):
- assert addon_downloader.get_downloader(UrlType.FILESYSTEM.value), "Should find" # noqa
-
- with pytest.raises(ValueError):
- addon_downloader.get_downloader("unknown"), "Shouldn't find"
-
-
-def test_addon_info(printer, sample_addon_info):
- """Tests parsing of expected payload from v4 server into AadonInfo."""
- valid_minimum = {
- "name": "openpype_slack",
- "productionVersion": "1.0.0",
- "versions": {
- "1.0.0": {
- "clientSourceInfo": [
- {
- "type": "filesystem",
- "path": {
- "windows": [
- "P:/sources/some_file.zip",
- "W:/sources/some_file.zip"],
- "linux": [
- "/mnt/srv/sources/some_file.zip"],
- "darwin": [
- "/Volumes/srv/sources/some_file.zip"] # noqa
- }
- }
- ]
- }
- }
- }
-
- assert AddonInfo.from_dict(valid_minimum), "Missing required fields"
-
- valid_minimum["versions"].pop("1.0.0")
- with pytest.raises(KeyError):
- assert not AddonInfo.from_dict(valid_minimum), "Must fail without version data" # noqa
-
- valid_minimum.pop("productionVersion")
- assert not AddonInfo.from_dict(
- valid_minimum), "none if not productionVersion" # noqa
-
- addon = AddonInfo.from_dict(sample_addon_info)
- assert addon, "Should be created"
- assert addon.name == "openpype_slack", "Incorrect name"
- assert addon.version == "1.0.0", "Incorrect version"
-
- with pytest.raises(TypeError):
- assert addon["name"], "Dict approach not implemented"
-
- addon_as_dict = attr.asdict(addon)
- assert addon_as_dict["name"], "Dict approach should work"
-
-
-def test_update_addon_state(printer, sample_addon_info,
- temp_folder, addon_downloader):
- """Tests possible cases of addon update."""
- addon_info = AddonInfo.from_dict(sample_addon_info)
- orig_hash = addon_info.hash
-
- addon_info.hash = "brokenhash"
- result = update_addon_state([addon_info], temp_folder, addon_downloader)
- assert result["openpype_slack_1.0.0"] == UpdateState.FAILED.value, \
- "Update should failed because of wrong hash"
-
- addon_info.hash = orig_hash
- result = update_addon_state([addon_info], temp_folder, addon_downloader)
- assert result["openpype_slack_1.0.0"] == UpdateState.UPDATED.value, \
- "Addon should have been updated"
-
- result = update_addon_state([addon_info], temp_folder, addon_downloader)
- assert result["openpype_slack_1.0.0"] == UpdateState.EXISTS.value, \
- "Addon should already exist"
diff --git a/docs/README.md b/docs/README.md
new file mode 100644
index 0000000000..102da990aa
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1,74 @@
+API Documentation
+=================
+
+This documents how to build and modify the API documentation using Sphinx and AutoAPI. The ground truth for the
+documentation should live directly in the sources - in docstrings and markdown files. Sphinx and AutoAPI will crawl over
+them and generate RST files that are in turn used to generate the HTML documentation. For docstrings we prefer the
+"Napoleon" or "Google" style, but RST is also acceptable, mainly in cases where you need to use Sphinx directives.
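+
+For example, a Google-style docstring that Sphinx/Napoleon can process (an illustrative function, not actual OpenPype code):
+
+```python
+def download_url(url, destination, filename=None):
+    """Download a file from ``url`` into ``destination``.
+
+    Args:
+        url (str): Remote url to download from.
+        destination (str): Local directory to write into.
+        filename (str, optional): Override the name of the created file.
+
+    Returns:
+        str: Path to the downloaded file.
+
+    Raises:
+        ValueError: If the url is not reachable.
+    """
+```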
+
+Using only docstrings is not really viable, as some documentation has to be written at a higher level - like an overview
+of some modules/functionality and so on. That content should be written directly in RST files and committed to the repository.
+
+Configuration
+-------------
+Configuration is done in `/docs/source/conf.py`. The most important settings are listed below; a short excerpt with
+typical values follows the list:
+
+- `autodoc_mock_imports`: add modules that can't actually be imported by Sphinx in the running environment, like `nuke`, `maya`, etc.
+- `autoapi_ignore`: add directories that shouldn't be processed by **AutoAPI**, like vendor dirs, etc.
+- `html_theme_options`: use these options to influence how the generated HTML theme will look.
+- `myst_gfm_only`: a MyST parser option setting which flavour of Markdown should be used.
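+
+For instance, the relevant excerpt of `conf.py` looks roughly like this (values shortened from the actual file):
+
+```python
+# docs/source/conf.py (excerpt)
+autodoc_mock_imports = ["maya", "pymel", "nuke", "hou", "bpy"]  # DCC modules Sphinx can't import
+autoapi_ignore = ["*vendor*", "*schemas*"]  # skip vendored dirs and schemas
+html_theme_options = {
+    "collapse_navigation": True,
+    "navigation_depth": 4,
+}
+myst_gfm_only = True  # parse Markdown as GitHub-flavoured Markdown
+```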
+
+How to build it
+---------------
+
+On Windows, you can run:
+
+```sh
+cd .\docs
+make.bat html
+```
+
+On Linux/macOS:
+
+```sh
+cd ./docs
+make html
+```
+
+This will go over our code and generate **.rst** files in `/docs/source/autoapi`, and from those it will generate the
+full HTML documentation in `/docs/build/html`.
+
+During the build you may see tons of red errors pointing to issues on our side:
+
+1) **Wrong imports** -
+Invalid imports are usually wrong relative imports (too deep) or circular imports.
+2) **Invalid docstrings** -
+Docstrings need to follow a certain syntax to be processed into documentation - this can be checked by running
+`pydocstyle`, which is already included with OpenPype (see the snippet after this list).
+3) **Invalid markdown/rst files** -
+Markdown/RST files can be included inside RST files using the `.. include::` directive, but they have to be properly
+formatted.
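+
+Docstring syntax can also be checked from Python - a minimal sketch, assuming `pydocstyle`'s public `check()` API and a hypothetical target file:
+
+```python
+from pydocstyle import check
+
+# Iterate over all docstring violations found in the given files.
+for error in check(["openpype/lib/path_tools.py"]):
+    print(error)  # e.g. D103: Missing docstring in public function
+```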
+
+Editing RST templates
+---------------------
+Everything starts with `/docs/source/index.rst` - this file should be properly edited. Right now it just
+includes `readme.rst`, which in turn includes and parses the main `README.md`. This is the entry point to the API
+documentation. All templates generated by AutoAPI are in `/docs/source/autoapi`. They should eventually be committed
+to the repository and edited too.
+
+Steps for enhancing API documentation
+-------------------------------------
+
+1) Run `/docs/make.bat html`
+2) Read the red errors/warnings and fix them in the code
+3) Run `/docs/make.bat html` again - repeat until there are no red lines
+4) Edit the RST files and add some meaningful content there
+
+Resources
+=========
+
+- [ReStructuredText on Wikipedia](https://en.wikipedia.org/wiki/ReStructuredText)
+- [RST Quick Reference](https://docutils.sourceforge.io/docs/user/rst/quickref.html)
+- [Sphinx AutoAPI Documentation](https://sphinx-autoapi.readthedocs.io/en/latest/)
+- [Example of Google Style Python Docstrings](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html)
+- [Sphinx Directives](https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html)
diff --git a/docs/make.bat b/docs/make.bat
index 4d9eb83d9f..1d261df277 100644
--- a/docs/make.bat
+++ b/docs/make.bat
@@ -5,7 +5,7 @@ pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
- set SPHINXBUILD=sphinx-build
+ set SPHINXBUILD=..\.poetry\bin\poetry run sphinx-build
)
set SOURCEDIR=source
set BUILDDIR=build
diff --git a/docs/source/_static/AYON_tight_G.svg b/docs/source/_static/AYON_tight_G.svg
new file mode 100644
index 0000000000..2c5b73deea
--- /dev/null
+++ b/docs/source/_static/AYON_tight_G.svg
@@ -0,0 +1,38 @@
diff --git a/common/openpype_common/distribution/__init__.py b/docs/source/_static/README.md
similarity index 100%
rename from common/openpype_common/distribution/__init__.py
rename to docs/source/_static/README.md
diff --git a/docs/source/_templates/autoapi/index.rst b/docs/source/_templates/autoapi/index.rst
new file mode 100644
index 0000000000..95d0ad8911
--- /dev/null
+++ b/docs/source/_templates/autoapi/index.rst
@@ -0,0 +1,15 @@
+API Reference
+=============
+
+This page contains auto-generated API reference documentation [#f1]_.
+
+.. toctree::
+ :titlesonly:
+
+ {% for page in pages %}
+ {% if page.top_level_object and page.display %}
+ {{ page.include_path }}
+ {% endif %}
+ {% endfor %}
+
+.. [#f1] Created with `sphinx-autoapi `_
diff --git a/docs/source/_templates/autoapi/python/attribute.rst b/docs/source/_templates/autoapi/python/attribute.rst
new file mode 100644
index 0000000000..ebaba555ad
--- /dev/null
+++ b/docs/source/_templates/autoapi/python/attribute.rst
@@ -0,0 +1 @@
+{% extends "python/data.rst" %}
diff --git a/docs/source/_templates/autoapi/python/class.rst b/docs/source/_templates/autoapi/python/class.rst
new file mode 100644
index 0000000000..df5edffb62
--- /dev/null
+++ b/docs/source/_templates/autoapi/python/class.rst
@@ -0,0 +1,58 @@
+{% if obj.display %}
+.. py:{{ obj.type }}:: {{ obj.short_name }}{% if obj.args %}({{ obj.args }}){% endif %}
+{% for (args, return_annotation) in obj.overloads %}
+ {{ " " * (obj.type | length) }} {{ obj.short_name }}{% if args %}({{ args }}){% endif %}
+{% endfor %}
+
+
+ {% if obj.bases %}
+ {% if "show-inheritance" in autoapi_options %}
+ Bases: {% for base in obj.bases %}{{ base|link_objs }}{% if not loop.last %}, {% endif %}{% endfor %}
+ {% endif %}
+
+
+ {% if "show-inheritance-diagram" in autoapi_options and obj.bases != ["object"] %}
+ .. autoapi-inheritance-diagram:: {{ obj.obj["full_name"] }}
+ :parts: 1
+ {% if "private-members" in autoapi_options %}
+ :private-bases:
+ {% endif %}
+
+ {% endif %}
+ {% endif %}
+ {% if obj.docstring %}
+ {{ obj.docstring|indent(3) }}
+ {% endif %}
+ {% if "inherited-members" in autoapi_options %}
+ {% set visible_classes = obj.classes|selectattr("display")|list %}
+ {% else %}
+ {% set visible_classes = obj.classes|rejectattr("inherited")|selectattr("display")|list %}
+ {% endif %}
+ {% for klass in visible_classes %}
+ {{ klass.render()|indent(3) }}
+ {% endfor %}
+ {% if "inherited-members" in autoapi_options %}
+ {% set visible_properties = obj.properties|selectattr("display")|list %}
+ {% else %}
+ {% set visible_properties = obj.properties|rejectattr("inherited")|selectattr("display")|list %}
+ {% endif %}
+ {% for property in visible_properties %}
+ {{ property.render()|indent(3) }}
+ {% endfor %}
+ {% if "inherited-members" in autoapi_options %}
+ {% set visible_attributes = obj.attributes|selectattr("display")|list %}
+ {% else %}
+ {% set visible_attributes = obj.attributes|rejectattr("inherited")|selectattr("display")|list %}
+ {% endif %}
+ {% for attribute in visible_attributes %}
+ {{ attribute.render()|indent(3) }}
+ {% endfor %}
+ {% if "inherited-members" in autoapi_options %}
+ {% set visible_methods = obj.methods|selectattr("display")|list %}
+ {% else %}
+ {% set visible_methods = obj.methods|rejectattr("inherited")|selectattr("display")|list %}
+ {% endif %}
+ {% for method in visible_methods %}
+ {{ method.render()|indent(3) }}
+ {% endfor %}
+{% endif %}
diff --git a/docs/source/_templates/autoapi/python/data.rst b/docs/source/_templates/autoapi/python/data.rst
new file mode 100644
index 0000000000..3d12b2d0c7
--- /dev/null
+++ b/docs/source/_templates/autoapi/python/data.rst
@@ -0,0 +1,37 @@
+{% if obj.display %}
+.. py:{{ obj.type }}:: {{ obj.name }}
+ {%- if obj.annotation is not none %}
+
+ :type: {%- if obj.annotation %} {{ obj.annotation }}{%- endif %}
+
+ {%- endif %}
+
+ {%- if obj.value is not none %}
+
+ :value: {% if obj.value is string and obj.value.splitlines()|count > 1 -%}
+ Multiline-String
+
+ .. raw:: html
+
+ Show Value
+
+ .. code-block:: python
+
+ """{{ obj.value|indent(width=8,blank=true) }}"""
+
+ .. raw:: html
+
+
+
+ {%- else -%}
+ {%- if obj.value is string -%}
+ {{ "%r" % obj.value|string|truncate(100) }}
+ {%- else -%}
+ {{ obj.value|string|truncate(100) }}
+ {%- endif -%}
+ {%- endif %}
+ {%- endif %}
+
+
+ {{ obj.docstring|indent(3) }}
+{% endif %}
diff --git a/docs/source/_templates/autoapi/python/exception.rst b/docs/source/_templates/autoapi/python/exception.rst
new file mode 100644
index 0000000000..92f3d38fd5
--- /dev/null
+++ b/docs/source/_templates/autoapi/python/exception.rst
@@ -0,0 +1 @@
+{% extends "python/class.rst" %}
diff --git a/docs/source/_templates/autoapi/python/function.rst b/docs/source/_templates/autoapi/python/function.rst
new file mode 100644
index 0000000000..b00d5c2445
--- /dev/null
+++ b/docs/source/_templates/autoapi/python/function.rst
@@ -0,0 +1,15 @@
+{% if obj.display %}
+.. py:function:: {{ obj.short_name }}({{ obj.args }}){% if obj.return_annotation is not none %} -> {{ obj.return_annotation }}{% endif %}
+
+{% for (args, return_annotation) in obj.overloads %}
+ {{ obj.short_name }}({{ args }}){% if return_annotation is not none %} -> {{ return_annotation }}{% endif %}
+
+{% endfor %}
+ {% for property in obj.properties %}
+ :{{ property }}:
+ {% endfor %}
+
+ {% if obj.docstring %}
+ {{ obj.docstring|indent(3) }}
+ {% endif %}
+{% endif %}
diff --git a/docs/source/_templates/autoapi/python/method.rst b/docs/source/_templates/autoapi/python/method.rst
new file mode 100644
index 0000000000..723cb7bbe5
--- /dev/null
+++ b/docs/source/_templates/autoapi/python/method.rst
@@ -0,0 +1,19 @@
+{%- if obj.display %}
+.. py:method:: {{ obj.short_name }}({{ obj.args }}){% if obj.return_annotation is not none %} -> {{ obj.return_annotation }}{% endif %}
+
+{% for (args, return_annotation) in obj.overloads %}
+ {{ obj.short_name }}({{ args }}){% if return_annotation is not none %} -> {{ return_annotation }}{% endif %}
+
+{% endfor %}
+ {% if obj.properties %}
+ {% for property in obj.properties %}
+ :{{ property }}:
+ {% endfor %}
+
+ {% else %}
+
+ {% endif %}
+ {% if obj.docstring %}
+ {{ obj.docstring|indent(3) }}
+ {% endif %}
+{% endif %}
diff --git a/docs/source/_templates/autoapi/python/module.rst b/docs/source/_templates/autoapi/python/module.rst
new file mode 100644
index 0000000000..d2714f6c9d
--- /dev/null
+++ b/docs/source/_templates/autoapi/python/module.rst
@@ -0,0 +1,114 @@
+{% if not obj.display %}
+:orphan:
+
+{% endif %}
+:py:mod:`{{ obj.name }}`
+=========={{ "=" * obj.name|length }}
+
+.. py:module:: {{ obj.name }}
+
+{% if obj.docstring %}
+.. autoapi-nested-parse::
+
+ {{ obj.docstring|indent(3) }}
+
+{% endif %}
+
+{% block subpackages %}
+{% set visible_subpackages = obj.subpackages|selectattr("display")|list %}
+{% if visible_subpackages %}
+Subpackages
+-----------
+.. toctree::
+ :titlesonly:
+ :maxdepth: 3
+
+{% for subpackage in visible_subpackages %}
+ {{ subpackage.short_name }}/index.rst
+{% endfor %}
+
+
+{% endif %}
+{% endblock %}
+{% block submodules %}
+{% set visible_submodules = obj.submodules|selectattr("display")|list %}
+{% if visible_submodules %}
+Submodules
+----------
+.. toctree::
+ :titlesonly:
+ :maxdepth: 1
+
+{% for submodule in visible_submodules %}
+ {{ submodule.short_name }}/index.rst
+{% endfor %}
+
+
+{% endif %}
+{% endblock %}
+{% block content %}
+{% if obj.all is not none %}
+{% set visible_children = obj.children|selectattr("short_name", "in", obj.all)|list %}
+{% elif obj.type is equalto("package") %}
+{% set visible_children = obj.children|selectattr("display")|list %}
+{% else %}
+{% set visible_children = obj.children|selectattr("display")|rejectattr("imported")|list %}
+{% endif %}
+{% if visible_children %}
+{{ obj.type|title }} Contents
+{{ "-" * obj.type|length }}---------
+
+{% set visible_classes = visible_children|selectattr("type", "equalto", "class")|list %}
+{% set visible_functions = visible_children|selectattr("type", "equalto", "function")|list %}
+{% set visible_attributes = visible_children|selectattr("type", "equalto", "data")|list %}
+{% if "show-module-summary" in autoapi_options and (visible_classes or visible_functions) %}
+{% block classes scoped %}
+{% if visible_classes %}
+Classes
+~~~~~~~
+
+.. autoapisummary::
+
+{% for klass in visible_classes %}
+ {{ klass.id }}
+{% endfor %}
+
+
+{% endif %}
+{% endblock %}
+
+{% block functions scoped %}
+{% if visible_functions %}
+Functions
+~~~~~~~~~
+
+.. autoapisummary::
+
+{% for function in visible_functions %}
+ {{ function.id }}
+{% endfor %}
+
+
+{% endif %}
+{% endblock %}
+
+{% block attributes scoped %}
+{% if visible_attributes %}
+Attributes
+~~~~~~~~~~
+
+.. autoapisummary::
+
+{% for attribute in visible_attributes %}
+ {{ attribute.id }}
+{% endfor %}
+
+
+{% endif %}
+{% endblock %}
+{% endif %}
+{% for obj_item in visible_children %}
+{{ obj_item.render()|indent(0) }}
+{% endfor %}
+{% endif %}
+{% endblock %}
diff --git a/docs/source/_templates/autoapi/python/package.rst b/docs/source/_templates/autoapi/python/package.rst
new file mode 100644
index 0000000000..fb9a64965e
--- /dev/null
+++ b/docs/source/_templates/autoapi/python/package.rst
@@ -0,0 +1 @@
+{% extends "python/module.rst" %}
diff --git a/docs/source/_templates/autoapi/python/property.rst b/docs/source/_templates/autoapi/python/property.rst
new file mode 100644
index 0000000000..70af24236f
--- /dev/null
+++ b/docs/source/_templates/autoapi/python/property.rst
@@ -0,0 +1,15 @@
+{%- if obj.display %}
+.. py:property:: {{ obj.short_name }}
+ {% if obj.annotation %}
+ :type: {{ obj.annotation }}
+ {% endif %}
+ {% if obj.properties %}
+ {% for property in obj.properties %}
+ :{{ property }}:
+ {% endfor %}
+ {% endif %}
+
+ {% if obj.docstring %}
+ {{ obj.docstring|indent(3) }}
+ {% endif %}
+{% endif %}
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 5b34ff8dc0..916a397e8e 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -17,18 +17,29 @@
import os
import sys
-pype_root = os.path.abspath('../..')
-sys.path.insert(0, pype_root)
+import revitron_sphinx_theme
+
+openpype_root = os.path.abspath('../..')
+sys.path.insert(0, openpype_root)
+# app = QApplication([])
+
+"""
repos = os.listdir(os.path.abspath("../../repos"))
-repos = [os.path.join(pype_root, "repos", repo) for repo in repos]
+repos = [os.path.join(openpype_root, "repos", repo) for repo in repos]
for repo in repos:
sys.path.append(repo)
+"""
+
+todo_include_todos = True
+autodoc_mock_imports = ["maya", "pymel", "nuke", "nukestudio", "nukescripts",
+ "hiero", "bpy", "fusion", "houdini", "hou", "unreal",
+ "__builtin__", "resolve", "pysync", "DaVinciResolveScript"]
# -- Project information -----------------------------------------------------
-project = 'pype'
-copyright = '2019, Orbi Tools'
-author = 'Orbi Tools'
+project = 'OpenPype'
+copyright = '2023 Ynput'
+author = 'Ynput'
# The short X.Y version
version = ''
@@ -52,11 +63,41 @@ extensions = [
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
- 'sphinx.ext.viewcode',
'sphinx.ext.autosummary',
- 'recommonmark'
+ 'revitron_sphinx_theme',
+ 'autoapi.extension',
+ 'myst_parser'
]
+##############################
+# Autoapi settings
+##############################
+
+autoapi_dirs = ['../../openpype', '../../igniter']
+
+# bypass modules with a lot of python2 content for now
+autoapi_ignore = [
+ "*vendor*",
+ "*schemas*",
+ "*startup/*",
+ "*/website*",
+ "*openpype/hooks*",
+ "*openpype/style*",
+ "openpype/tests*",
+    # too many levels of relative imports:
+ "*/modules/sync_server/*"
+]
+autoapi_keep_files = True
+autoapi_options = [
+ 'members',
+ 'undoc-members',
+ 'show-inheritance',
+ 'show-module-summary'
+]
+autoapi_add_toctree_entry = True
+autoapi_template_dir = '_templates/autoapi'
+
+
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -64,7 +105,7 @@ templates_path = ['_templates']
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
+source_suffix = ['.rst', '.md']
# The master toctree document.
master_doc = 'index'
@@ -74,12 +115,15 @@ master_doc = 'index'
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
-language = None
+language = "English"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = []
+exclude_patterns = [
+ "openpype.hosts.resolve.*",
+ "openpype.tools.*"
+ ]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'friendly'
@@ -97,15 +141,22 @@ autosummary_generate = True
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
-html_theme = 'sphinx_rtd_theme'
+html_theme = 'revitron_sphinx_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
- 'collapse_navigation': False
+ 'collapse_navigation': True,
+ 'sticky_navigation': True,
+ 'navigation_depth': 4,
+ 'includehidden': True,
+ 'titles_only': False,
+ 'github_url': '',
}
+html_logo = '_static/AYON_tight_G.svg'
+
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
@@ -153,8 +204,8 @@ latex_elements = {
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
- (master_doc, 'pype.tex', 'pype Documentation',
- 'OrbiTools', 'manual'),
+ (master_doc, 'openpype.tex', 'OpenPype Documentation',
+ 'Ynput', 'manual'),
]
@@ -163,7 +214,7 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
- (master_doc, 'pype', 'pype Documentation',
+ (master_doc, 'openpype', 'OpenPype Documentation',
[author], 1)
]
@@ -174,8 +225,8 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
- (master_doc, 'pype', 'pype Documentation',
- author, 'pype', 'One line description of project.',
+ (master_doc, 'OpenPype', 'OpenPype Documentation',
+ author, 'OpenPype', 'Pipeline for studios',
'Miscellaneous'),
]
@@ -207,7 +258,4 @@ intersphinx_mapping = {
'https://docs.python.org/3/': None
}
-# -- Options for todo extension ----------------------------------------------
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
+myst_gfm_only = True
diff --git a/docs/source/igniter.bootstrap_repos.rst b/docs/source/igniter.bootstrap_repos.rst
deleted file mode 100644
index 7c6e0a0757..0000000000
--- a/docs/source/igniter.bootstrap_repos.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-igniter.bootstrap\_repos module
-===============================
-
-.. automodule:: igniter.bootstrap_repos
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/igniter.install_dialog.rst b/docs/source/igniter.install_dialog.rst
deleted file mode 100644
index bf30ec270e..0000000000
--- a/docs/source/igniter.install_dialog.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-igniter.install\_dialog module
-==============================
-
-.. automodule:: igniter.install_dialog
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/igniter.install_thread.rst b/docs/source/igniter.install_thread.rst
deleted file mode 100644
index 6c19516219..0000000000
--- a/docs/source/igniter.install_thread.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-igniter.install\_thread module
-==============================
-
-.. automodule:: igniter.install_thread
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/igniter.rst b/docs/source/igniter.rst
deleted file mode 100644
index b4aebe88b0..0000000000
--- a/docs/source/igniter.rst
+++ /dev/null
@@ -1,42 +0,0 @@
-igniter package
-===============
-
-.. automodule:: igniter
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-igniter.bootstrap\_repos module
--------------------------------
-
-.. automodule:: igniter.bootstrap_repos
- :members:
- :undoc-members:
- :show-inheritance:
-
-igniter.install\_dialog module
-------------------------------
-
-.. automodule:: igniter.install_dialog
- :members:
- :undoc-members:
- :show-inheritance:
-
-igniter.install\_thread module
-------------------------------
-
-.. automodule:: igniter.install_thread
- :members:
- :undoc-members:
- :show-inheritance:
-
-igniter.tools module
---------------------
-
-.. automodule:: igniter.tools
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/igniter.tools.rst b/docs/source/igniter.tools.rst
deleted file mode 100644
index 4fdbdf9d29..0000000000
--- a/docs/source/igniter.tools.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-igniter.tools module
-====================
-
-.. automodule:: igniter.tools
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/index.rst b/docs/source/index.rst
index b54d153894..f703468fca 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -1,14 +1,15 @@
-.. pype documentation master file, created by
+.. openpype documentation master file, created by
sphinx-quickstart on Mon May 13 17:18:23 2019.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
-Welcome to pype's documentation!
-================================
+Welcome to OpenPype's API documentation!
+========================================
.. toctree::
- readme
- modules
+
+ Readme
+
Indices and tables
==================
diff --git a/docs/source/modules.rst b/docs/source/modules.rst
deleted file mode 100644
index 1956d9ed04..0000000000
--- a/docs/source/modules.rst
+++ /dev/null
@@ -1,8 +0,0 @@
-igniter
-=======
-
-.. toctree::
- :maxdepth: 6
-
- igniter
- pype
\ No newline at end of file
diff --git a/docs/source/pype.action.rst b/docs/source/pype.action.rst
deleted file mode 100644
index 62a32e08b5..0000000000
--- a/docs/source/pype.action.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.action module
-==================
-
-.. automodule:: pype.action
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.api.rst b/docs/source/pype.api.rst
deleted file mode 100644
index af3602a895..0000000000
--- a/docs/source/pype.api.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.api module
-===============
-
-.. automodule:: pype.api
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.cli.rst b/docs/source/pype.cli.rst
deleted file mode 100644
index 7e4a336fa9..0000000000
--- a/docs/source/pype.cli.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.cli module
-===============
-
-.. automodule:: pype.cli
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.aftereffects.rst b/docs/source/pype.hosts.aftereffects.rst
deleted file mode 100644
index 3c2b2dda41..0000000000
--- a/docs/source/pype.hosts.aftereffects.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.aftereffects package
-===============================
-
-.. automodule:: pype.hosts.aftereffects
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.blender.action.rst b/docs/source/pype.hosts.blender.action.rst
deleted file mode 100644
index a6444b1efc..0000000000
--- a/docs/source/pype.hosts.blender.action.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.blender.action module
-================================
-
-.. automodule:: pype.hosts.blender.action
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.blender.plugin.rst b/docs/source/pype.hosts.blender.plugin.rst
deleted file mode 100644
index cf6a8feec8..0000000000
--- a/docs/source/pype.hosts.blender.plugin.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.blender.plugin module
-================================
-
-.. automodule:: pype.hosts.blender.plugin
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.blender.rst b/docs/source/pype.hosts.blender.rst
deleted file mode 100644
index 19cb85e5f3..0000000000
--- a/docs/source/pype.hosts.blender.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.hosts.blender package
-==========================
-
-.. automodule:: pype.hosts.blender
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.hosts.blender.action module
---------------------------------
-
-.. automodule:: pype.hosts.blender.action
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.blender.plugin module
---------------------------------
-
-.. automodule:: pype.hosts.blender.plugin
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.celaction.cli.rst b/docs/source/pype.hosts.celaction.cli.rst
deleted file mode 100644
index c8843b90bd..0000000000
--- a/docs/source/pype.hosts.celaction.cli.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.celaction.cli module
-===============================
-
-.. automodule:: pype.hosts.celaction.cli
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.celaction.rst b/docs/source/pype.hosts.celaction.rst
deleted file mode 100644
index 1aa236397e..0000000000
--- a/docs/source/pype.hosts.celaction.rst
+++ /dev/null
@@ -1,18 +0,0 @@
-pype.hosts.celaction package
-============================
-
-.. automodule:: pype.hosts.celaction
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.hosts.celaction.cli module
--------------------------------
-
-.. automodule:: pype.hosts.celaction.cli
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.fusion.lib.rst b/docs/source/pype.hosts.fusion.lib.rst
deleted file mode 100644
index 32b8f501f5..0000000000
--- a/docs/source/pype.hosts.fusion.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.fusion.lib module
-============================
-
-.. automodule:: pype.hosts.fusion.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.fusion.menu.rst b/docs/source/pype.hosts.fusion.menu.rst
deleted file mode 100644
index ec5bf76612..0000000000
--- a/docs/source/pype.hosts.fusion.menu.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.fusion.menu module
-=============================
-
-.. automodule:: pype.hosts.fusion.menu
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.fusion.pipeline.rst b/docs/source/pype.hosts.fusion.pipeline.rst
deleted file mode 100644
index ff2a6440a8..0000000000
--- a/docs/source/pype.hosts.fusion.pipeline.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.fusion.pipeline module
-=================================
-
-.. automodule:: pype.hosts.fusion.pipeline
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.fusion.rst b/docs/source/pype.hosts.fusion.rst
deleted file mode 100644
index 7c2fee827c..0000000000
--- a/docs/source/pype.hosts.fusion.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.hosts.fusion package
-=========================
-
-.. automodule:: pype.hosts.fusion
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.hosts.fusion.scripts
-
-Submodules
-----------
-
-pype.hosts.fusion.lib module
-----------------------------
-
-.. automodule:: pype.hosts.fusion.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.fusion.scripts.duplicate_with_inputs.rst b/docs/source/pype.hosts.fusion.scripts.duplicate_with_inputs.rst
deleted file mode 100644
index 2503c20f3b..0000000000
--- a/docs/source/pype.hosts.fusion.scripts.duplicate_with_inputs.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.fusion.scripts.duplicate\_with\_inputs module
-========================================================
-
-.. automodule:: pype.hosts.fusion.scripts.duplicate_with_inputs
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.fusion.scripts.fusion_switch_shot.rst b/docs/source/pype.hosts.fusion.scripts.fusion_switch_shot.rst
deleted file mode 100644
index 770300116f..0000000000
--- a/docs/source/pype.hosts.fusion.scripts.fusion_switch_shot.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.fusion.scripts.fusion\_switch\_shot module
-=====================================================
-
-.. automodule:: pype.hosts.fusion.scripts.fusion_switch_shot
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.fusion.scripts.rst b/docs/source/pype.hosts.fusion.scripts.rst
deleted file mode 100644
index 5de5f66652..0000000000
--- a/docs/source/pype.hosts.fusion.scripts.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.hosts.fusion.scripts package
-=================================
-
-.. automodule:: pype.hosts.fusion.scripts
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.hosts.fusion.scripts.fusion\_switch\_shot module
------------------------------------------------------
-
-.. automodule:: pype.hosts.fusion.scripts.fusion_switch_shot
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.fusion.scripts.publish\_filesequence module
-------------------------------------------------------
-
-.. automodule:: pype.hosts.fusion.scripts.publish_filesequence
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.fusion.scripts.set_rendermode.rst b/docs/source/pype.hosts.fusion.scripts.set_rendermode.rst
deleted file mode 100644
index 27bff63466..0000000000
--- a/docs/source/pype.hosts.fusion.scripts.set_rendermode.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.fusion.scripts.set\_rendermode module
-================================================
-
-.. automodule:: pype.hosts.fusion.scripts.set_rendermode
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.fusion.utils.rst b/docs/source/pype.hosts.fusion.utils.rst
deleted file mode 100644
index b6de3d0510..0000000000
--- a/docs/source/pype.hosts.fusion.utils.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.fusion.utils module
-==============================
-
-.. automodule:: pype.hosts.fusion.utils
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.harmony.rst b/docs/source/pype.hosts.harmony.rst
deleted file mode 100644
index 60e1fcdce6..0000000000
--- a/docs/source/pype.hosts.harmony.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.harmony package
-==========================
-
-.. automodule:: pype.hosts.harmony
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.hiero.events.rst b/docs/source/pype.hosts.hiero.events.rst
deleted file mode 100644
index 874abbffba..0000000000
--- a/docs/source/pype.hosts.hiero.events.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.hiero.events module
-==============================
-
-.. automodule:: pype.hosts.hiero.events
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.hiero.lib.rst b/docs/source/pype.hosts.hiero.lib.rst
deleted file mode 100644
index 8c0d33b03b..0000000000
--- a/docs/source/pype.hosts.hiero.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.hiero.lib module
-===========================
-
-.. automodule:: pype.hosts.hiero.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.hiero.menu.rst b/docs/source/pype.hosts.hiero.menu.rst
deleted file mode 100644
index baa1317e61..0000000000
--- a/docs/source/pype.hosts.hiero.menu.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.hiero.menu module
-============================
-
-.. automodule:: pype.hosts.hiero.menu
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.hiero.rst b/docs/source/pype.hosts.hiero.rst
deleted file mode 100644
index 9a7891b45e..0000000000
--- a/docs/source/pype.hosts.hiero.rst
+++ /dev/null
@@ -1,19 +0,0 @@
-pype.hosts.hiero package
-========================
-
-.. automodule:: pype.hosts.hiero
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-.. toctree::
- :maxdepth: 10
-
- pype.hosts.hiero.events
- pype.hosts.hiero.lib
- pype.hosts.hiero.menu
- pype.hosts.hiero.tags
- pype.hosts.hiero.workio
diff --git a/docs/source/pype.hosts.hiero.tags.rst b/docs/source/pype.hosts.hiero.tags.rst
deleted file mode 100644
index 0df33279d5..0000000000
--- a/docs/source/pype.hosts.hiero.tags.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.hiero.tags module
-============================
-
-.. automodule:: pype.hosts.hiero.tags
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.hiero.workio.rst b/docs/source/pype.hosts.hiero.workio.rst
deleted file mode 100644
index 11aae43212..0000000000
--- a/docs/source/pype.hosts.hiero.workio.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.hiero.workio module
-==============================
-
-.. automodule:: pype.hosts.hiero.workio
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.houdini.lib.rst b/docs/source/pype.hosts.houdini.lib.rst
deleted file mode 100644
index ba6e60d5f3..0000000000
--- a/docs/source/pype.hosts.houdini.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.houdini.lib module
-=============================
-
-.. automodule:: pype.hosts.houdini.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.houdini.rst b/docs/source/pype.hosts.houdini.rst
deleted file mode 100644
index 5db18ab3d4..0000000000
--- a/docs/source/pype.hosts.houdini.rst
+++ /dev/null
@@ -1,18 +0,0 @@
-pype.hosts.houdini package
-==========================
-
-.. automodule:: pype.hosts.houdini
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.hosts.houdini.lib module
------------------------------
-
-.. automodule:: pype.hosts.houdini.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.maya.action.rst b/docs/source/pype.hosts.maya.action.rst
deleted file mode 100644
index e1ad7e5d43..0000000000
--- a/docs/source/pype.hosts.maya.action.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.maya.action module
-=============================
-
-.. automodule:: pype.hosts.maya.action
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.maya.customize.rst b/docs/source/pype.hosts.maya.customize.rst
deleted file mode 100644
index 335e75b0d4..0000000000
--- a/docs/source/pype.hosts.maya.customize.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.maya.customize module
-================================
-
-.. automodule:: pype.hosts.maya.customize
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.maya.expected_files.rst b/docs/source/pype.hosts.maya.expected_files.rst
deleted file mode 100644
index 0ecf22e502..0000000000
--- a/docs/source/pype.hosts.maya.expected_files.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.maya.expected\_files module
-======================================
-
-.. automodule:: pype.hosts.maya.expected_files
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.maya.lib.rst b/docs/source/pype.hosts.maya.lib.rst
deleted file mode 100644
index 7d7dbe4502..0000000000
--- a/docs/source/pype.hosts.maya.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.maya.lib module
-==========================
-
-.. automodule:: pype.hosts.maya.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.maya.menu.rst b/docs/source/pype.hosts.maya.menu.rst
deleted file mode 100644
index 614e113769..0000000000
--- a/docs/source/pype.hosts.maya.menu.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.maya.menu module
-===========================
-
-.. automodule:: pype.hosts.maya.menu
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.maya.plugin.rst b/docs/source/pype.hosts.maya.plugin.rst
deleted file mode 100644
index 5796b40c70..0000000000
--- a/docs/source/pype.hosts.maya.plugin.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.maya.plugin module
-=============================
-
-.. automodule:: pype.hosts.maya.plugin
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.maya.rst b/docs/source/pype.hosts.maya.rst
deleted file mode 100644
index 0beab888fc..0000000000
--- a/docs/source/pype.hosts.maya.rst
+++ /dev/null
@@ -1,58 +0,0 @@
-pype.hosts.maya package
-=======================
-
-.. automodule:: pype.hosts.maya
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.hosts.maya.action module
------------------------------
-
-.. automodule:: pype.hosts.maya.action
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.maya.customize module
---------------------------------
-
-.. automodule:: pype.hosts.maya.customize
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.maya.expected\_files module
---------------------------------------
-
-.. automodule:: pype.hosts.maya.expected_files
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.maya.lib module
---------------------------
-
-.. automodule:: pype.hosts.maya.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.maya.menu module
----------------------------
-
-.. automodule:: pype.hosts.maya.menu
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.maya.plugin module
------------------------------
-
-.. automodule:: pype.hosts.maya.plugin
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.nuke.actions.rst b/docs/source/pype.hosts.nuke.actions.rst
deleted file mode 100644
index d5e8849a38..0000000000
--- a/docs/source/pype.hosts.nuke.actions.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.nuke.actions module
-==============================
-
-.. automodule:: pype.hosts.nuke.actions
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.nuke.lib.rst b/docs/source/pype.hosts.nuke.lib.rst
deleted file mode 100644
index c177a27f2d..0000000000
--- a/docs/source/pype.hosts.nuke.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.nuke.lib module
-==========================
-
-.. automodule:: pype.hosts.nuke.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.nuke.menu.rst b/docs/source/pype.hosts.nuke.menu.rst
deleted file mode 100644
index 190e488b95..0000000000
--- a/docs/source/pype.hosts.nuke.menu.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.nuke.menu module
-===========================
-
-.. automodule:: pype.hosts.nuke.menu
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.nuke.plugin.rst b/docs/source/pype.hosts.nuke.plugin.rst
deleted file mode 100644
index ddd5f1db89..0000000000
--- a/docs/source/pype.hosts.nuke.plugin.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.nuke.plugin module
-=============================
-
-.. automodule:: pype.hosts.nuke.plugin
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.nuke.presets.rst b/docs/source/pype.hosts.nuke.presets.rst
deleted file mode 100644
index a69aa8a367..0000000000
--- a/docs/source/pype.hosts.nuke.presets.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.nuke.presets module
-==============================
-
-.. automodule:: pype.hosts.nuke.presets
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.nuke.rst b/docs/source/pype.hosts.nuke.rst
deleted file mode 100644
index 559de65927..0000000000
--- a/docs/source/pype.hosts.nuke.rst
+++ /dev/null
@@ -1,58 +0,0 @@
-pype.hosts.nuke package
-=======================
-
-.. automodule:: pype.hosts.nuke
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.hosts.nuke.actions module
-------------------------------
-
-.. automodule:: pype.hosts.nuke.actions
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.nuke.lib module
---------------------------
-
-.. automodule:: pype.hosts.nuke.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.nuke.menu module
----------------------------
-
-.. automodule:: pype.hosts.nuke.menu
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.nuke.plugin module
------------------------------
-
-.. automodule:: pype.hosts.nuke.plugin
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.nuke.presets module
-------------------------------
-
-.. automodule:: pype.hosts.nuke.presets
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.nuke.utils module
-----------------------------
-
-.. automodule:: pype.hosts.nuke.utils
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.nuke.utils.rst b/docs/source/pype.hosts.nuke.utils.rst
deleted file mode 100644
index 66974dc707..0000000000
--- a/docs/source/pype.hosts.nuke.utils.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.nuke.utils module
-============================
-
-.. automodule:: pype.hosts.nuke.utils
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.nukestudio.rst b/docs/source/pype.hosts.nukestudio.rst
deleted file mode 100644
index c718d699fa..0000000000
--- a/docs/source/pype.hosts.nukestudio.rst
+++ /dev/null
@@ -1,50 +0,0 @@
-pype.hosts.nukestudio package
-=============================
-
-.. automodule:: pype.hosts.nukestudio
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.hosts.nukestudio.events module
------------------------------------
-
-.. automodule:: pype.hosts.nukestudio.events
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.nukestudio.lib module
---------------------------------
-
-.. automodule:: pype.hosts.nukestudio.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.nukestudio.menu module
----------------------------------
-
-.. automodule:: pype.hosts.nukestudio.menu
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.nukestudio.tags module
----------------------------------
-
-.. automodule:: pype.hosts.nukestudio.tags
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.nukestudio.workio module
------------------------------------
-
-.. automodule:: pype.hosts.nukestudio.workio
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.photoshop.rst b/docs/source/pype.hosts.photoshop.rst
deleted file mode 100644
index f77ea79874..0000000000
--- a/docs/source/pype.hosts.photoshop.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.photoshop package
-============================
-
-.. automodule:: pype.hosts.photoshop
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.premiere.lib.rst b/docs/source/pype.hosts.premiere.lib.rst
deleted file mode 100644
index e2c2723841..0000000000
--- a/docs/source/pype.hosts.premiere.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.premiere.lib module
-==============================
-
-.. automodule:: pype.hosts.premiere.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.premiere.rst b/docs/source/pype.hosts.premiere.rst
deleted file mode 100644
index 7c38d52c22..0000000000
--- a/docs/source/pype.hosts.premiere.rst
+++ /dev/null
@@ -1,18 +0,0 @@
-pype.hosts.premiere package
-===========================
-
-.. automodule:: pype.hosts.premiere
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.hosts.premiere.lib module
-------------------------------
-
-.. automodule:: pype.hosts.premiere.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.resolve.action.rst b/docs/source/pype.hosts.resolve.action.rst
deleted file mode 100644
index 781694781f..0000000000
--- a/docs/source/pype.hosts.resolve.action.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.resolve.action module
-================================
-
-.. automodule:: pype.hosts.resolve.action
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.resolve.lib.rst b/docs/source/pype.hosts.resolve.lib.rst
deleted file mode 100644
index 5860f783cc..0000000000
--- a/docs/source/pype.hosts.resolve.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.resolve.lib module
-=============================
-
-.. automodule:: pype.hosts.resolve.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.resolve.menu.rst b/docs/source/pype.hosts.resolve.menu.rst
deleted file mode 100644
index df87dcde98..0000000000
--- a/docs/source/pype.hosts.resolve.menu.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.resolve.menu module
-==============================
-
-.. automodule:: pype.hosts.resolve.menu
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.resolve.otio.davinci_export.rst b/docs/source/pype.hosts.resolve.otio.davinci_export.rst
deleted file mode 100644
index 498f96a7ed..0000000000
--- a/docs/source/pype.hosts.resolve.otio.davinci_export.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.resolve.otio.davinci\_export module
-==============================================
-
-.. automodule:: pype.hosts.resolve.otio.davinci_export
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.resolve.otio.davinci_import.rst b/docs/source/pype.hosts.resolve.otio.davinci_import.rst
deleted file mode 100644
index 30f43cc9fe..0000000000
--- a/docs/source/pype.hosts.resolve.otio.davinci_import.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.resolve.otio.davinci\_import module
-==============================================
-
-.. automodule:: pype.hosts.resolve.otio.davinci_import
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.resolve.otio.rst b/docs/source/pype.hosts.resolve.otio.rst
deleted file mode 100644
index 523d8937ca..0000000000
--- a/docs/source/pype.hosts.resolve.otio.rst
+++ /dev/null
@@ -1,17 +0,0 @@
-pype.hosts.resolve.otio package
-===============================
-
-.. automodule:: pype.hosts.resolve.otio
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-.. toctree::
- :maxdepth: 10
-
- pype.hosts.resolve.otio.davinci_export
- pype.hosts.resolve.otio.davinci_import
- pype.hosts.resolve.otio.utils
diff --git a/docs/source/pype.hosts.resolve.otio.utils.rst b/docs/source/pype.hosts.resolve.otio.utils.rst
deleted file mode 100644
index 765f492732..0000000000
--- a/docs/source/pype.hosts.resolve.otio.utils.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.resolve.otio.utils module
-====================================
-
-.. automodule:: pype.hosts.resolve.otio.utils
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.resolve.pipeline.rst b/docs/source/pype.hosts.resolve.pipeline.rst
deleted file mode 100644
index 3efc24137b..0000000000
--- a/docs/source/pype.hosts.resolve.pipeline.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.resolve.pipeline module
-==================================
-
-.. automodule:: pype.hosts.resolve.pipeline
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.resolve.plugin.rst b/docs/source/pype.hosts.resolve.plugin.rst
deleted file mode 100644
index 26f6c56aef..0000000000
--- a/docs/source/pype.hosts.resolve.plugin.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.resolve.plugin module
-================================
-
-.. automodule:: pype.hosts.resolve.plugin
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.resolve.preload_console.rst b/docs/source/pype.hosts.resolve.preload_console.rst
deleted file mode 100644
index 0d38ae14ea..0000000000
--- a/docs/source/pype.hosts.resolve.preload_console.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.resolve.preload\_console module
-==========================================
-
-.. automodule:: pype.hosts.resolve.preload_console
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.resolve.rst b/docs/source/pype.hosts.resolve.rst
deleted file mode 100644
index 368129e43e..0000000000
--- a/docs/source/pype.hosts.resolve.rst
+++ /dev/null
@@ -1,74 +0,0 @@
-pype.hosts.resolve package
-==========================
-
-.. automodule:: pype.hosts.resolve
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.hosts.resolve.action module
---------------------------------
-
-.. automodule:: pype.hosts.resolve.action
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.resolve.lib module
------------------------------
-
-.. automodule:: pype.hosts.resolve.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.resolve.menu module
-------------------------------
-
-.. automodule:: pype.hosts.resolve.menu
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.resolve.pipeline module
-----------------------------------
-
-.. automodule:: pype.hosts.resolve.pipeline
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.resolve.plugin module
---------------------------------
-
-.. automodule:: pype.hosts.resolve.plugin
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.resolve.preload\_console module
-------------------------------------------
-
-.. automodule:: pype.hosts.resolve.preload_console
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.resolve.utils module
--------------------------------
-
-.. automodule:: pype.hosts.resolve.utils
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.resolve.workio module
---------------------------------
-
-.. automodule:: pype.hosts.resolve.workio
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.resolve.todo-rendering.rst b/docs/source/pype.hosts.resolve.todo-rendering.rst
deleted file mode 100644
index 8ea80183ce..0000000000
--- a/docs/source/pype.hosts.resolve.todo-rendering.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.resolve.todo\-rendering module
-=========================================
-
-.. automodule:: pype.hosts.resolve.todo-rendering
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.resolve.utils.rst b/docs/source/pype.hosts.resolve.utils.rst
deleted file mode 100644
index e390a5d026..0000000000
--- a/docs/source/pype.hosts.resolve.utils.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.resolve.utils module
-===============================
-
-.. automodule:: pype.hosts.resolve.utils
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.resolve.workio.rst b/docs/source/pype.hosts.resolve.workio.rst
deleted file mode 100644
index 5dceb99d64..0000000000
--- a/docs/source/pype.hosts.resolve.workio.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.resolve.workio module
-================================
-
-.. automodule:: pype.hosts.resolve.workio
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.rst b/docs/source/pype.hosts.rst
deleted file mode 100644
index e2d9121501..0000000000
--- a/docs/source/pype.hosts.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.hosts package
-==================
-
-.. automodule:: pype.hosts
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.hosts.blender
- pype.hosts.celaction
- pype.hosts.fusion
- pype.hosts.harmony
- pype.hosts.houdini
- pype.hosts.maya
- pype.hosts.nuke
- pype.hosts.nukestudio
- pype.hosts.photoshop
- pype.hosts.premiere
- pype.hosts.resolve
- pype.hosts.unreal
diff --git a/docs/source/pype.hosts.tvpaint.api.rst b/docs/source/pype.hosts.tvpaint.api.rst
deleted file mode 100644
index 43273e8ec5..0000000000
--- a/docs/source/pype.hosts.tvpaint.api.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.tvpaint.api package
-==============================
-
-.. automodule:: pype.hosts.tvpaint.api
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.tvpaint.rst b/docs/source/pype.hosts.tvpaint.rst
deleted file mode 100644
index 561be3a9dc..0000000000
--- a/docs/source/pype.hosts.tvpaint.rst
+++ /dev/null
@@ -1,15 +0,0 @@
-pype.hosts.tvpaint package
-==========================
-
-.. automodule:: pype.hosts.tvpaint
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 10
-
- pype.hosts.tvpaint.api
diff --git a/docs/source/pype.hosts.unreal.lib.rst b/docs/source/pype.hosts.unreal.lib.rst
deleted file mode 100644
index b891e71c47..0000000000
--- a/docs/source/pype.hosts.unreal.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.unreal.lib module
-============================
-
-.. automodule:: pype.hosts.unreal.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.unreal.plugin.rst b/docs/source/pype.hosts.unreal.plugin.rst
deleted file mode 100644
index e3ef81c7c7..0000000000
--- a/docs/source/pype.hosts.unreal.plugin.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.hosts.unreal.plugin module
-===============================
-
-.. automodule:: pype.hosts.unreal.plugin
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.hosts.unreal.rst b/docs/source/pype.hosts.unreal.rst
deleted file mode 100644
index f46140298b..0000000000
--- a/docs/source/pype.hosts.unreal.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.hosts.unreal package
-=========================
-
-.. automodule:: pype.hosts.unreal
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.hosts.unreal.lib module
-----------------------------
-
-.. automodule:: pype.hosts.unreal.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.hosts.unreal.plugin module
--------------------------------
-
-.. automodule:: pype.hosts.unreal.plugin
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.launcher_actions.rst b/docs/source/pype.launcher_actions.rst
deleted file mode 100644
index c7525acbd1..0000000000
--- a/docs/source/pype.launcher_actions.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.launcher\_actions module
-=============================
-
-.. automodule:: pype.launcher_actions
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.abstract_collect_render.rst b/docs/source/pype.lib.abstract_collect_render.rst
deleted file mode 100644
index d6adadc271..0000000000
--- a/docs/source/pype.lib.abstract_collect_render.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.abstract\_collect\_render module
-=========================================
-
-.. automodule:: pype.lib.abstract_collect_render
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.abstract_expected_files.rst b/docs/source/pype.lib.abstract_expected_files.rst
deleted file mode 100644
index 904aeb3375..0000000000
--- a/docs/source/pype.lib.abstract_expected_files.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.abstract\_expected\_files module
-=========================================
-
-.. automodule:: pype.lib.abstract_expected_files
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.abstract_metaplugins.rst b/docs/source/pype.lib.abstract_metaplugins.rst
deleted file mode 100644
index 9f2751b630..0000000000
--- a/docs/source/pype.lib.abstract_metaplugins.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.abstract\_metaplugins module
-=====================================
-
-.. automodule:: pype.lib.abstract_metaplugins
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.abstract_submit_deadline.rst b/docs/source/pype.lib.abstract_submit_deadline.rst
deleted file mode 100644
index a57222add3..0000000000
--- a/docs/source/pype.lib.abstract_submit_deadline.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.abstract\_submit\_deadline module
-==========================================
-
-.. automodule:: pype.lib.abstract_submit_deadline
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.anatomy.rst b/docs/source/pype.lib.anatomy.rst
deleted file mode 100644
index 7bddb37c8a..0000000000
--- a/docs/source/pype.lib.anatomy.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.anatomy module
-=======================
-
-.. automodule:: pype.lib.anatomy
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.applications.rst b/docs/source/pype.lib.applications.rst
deleted file mode 100644
index 8d1ff9b2c6..0000000000
--- a/docs/source/pype.lib.applications.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.applications module
-============================
-
-.. automodule:: pype.lib.applications
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.avalon_context.rst b/docs/source/pype.lib.avalon_context.rst
deleted file mode 100644
index 067ea3380f..0000000000
--- a/docs/source/pype.lib.avalon_context.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.avalon\_context module
-===============================
-
-.. automodule:: pype.lib.avalon_context
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.config.rst b/docs/source/pype.lib.config.rst
deleted file mode 100644
index ce4c13f4e7..0000000000
--- a/docs/source/pype.lib.config.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.config module
-======================
-
-.. automodule:: pype.lib.config
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.deprecated.rst b/docs/source/pype.lib.deprecated.rst
deleted file mode 100644
index ec5ee58d67..0000000000
--- a/docs/source/pype.lib.deprecated.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.deprecated module
-==========================
-
-.. automodule:: pype.lib.deprecated
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.editorial.rst b/docs/source/pype.lib.editorial.rst
deleted file mode 100644
index d32e495e51..0000000000
--- a/docs/source/pype.lib.editorial.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.editorial module
-=========================
-
-.. automodule:: pype.lib.editorial
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.env_tools.rst b/docs/source/pype.lib.env_tools.rst
deleted file mode 100644
index cb470207c8..0000000000
--- a/docs/source/pype.lib.env_tools.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.env\_tools module
-==========================
-
-.. automodule:: pype.lib.env_tools
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.execute.rst b/docs/source/pype.lib.execute.rst
deleted file mode 100644
index 82c4ef0ad8..0000000000
--- a/docs/source/pype.lib.execute.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.execute module
-=======================
-
-.. automodule:: pype.lib.execute
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.ffmpeg_utils.rst b/docs/source/pype.lib.ffmpeg_utils.rst
deleted file mode 100644
index 968a3f39c8..0000000000
--- a/docs/source/pype.lib.ffmpeg_utils.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.ffmpeg\_utils module
-=============================
-
-.. automodule:: pype.lib.ffmpeg_utils
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.git_progress.rst b/docs/source/pype.lib.git_progress.rst
deleted file mode 100644
index 017cf4c3c7..0000000000
--- a/docs/source/pype.lib.git_progress.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.git\_progress module
-=============================
-
-.. automodule:: pype.lib.git_progress
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.log.rst b/docs/source/pype.lib.log.rst
deleted file mode 100644
index 6282178850..0000000000
--- a/docs/source/pype.lib.log.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.log module
-===================
-
-.. automodule:: pype.lib.log
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.mongo.rst b/docs/source/pype.lib.mongo.rst
deleted file mode 100644
index 34fbc6af7f..0000000000
--- a/docs/source/pype.lib.mongo.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.mongo module
-=====================
-
-.. automodule:: pype.lib.mongo
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.path_tools.rst b/docs/source/pype.lib.path_tools.rst
deleted file mode 100644
index c19c41eea3..0000000000
--- a/docs/source/pype.lib.path_tools.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.path\_tools module
-===========================
-
-.. automodule:: pype.lib.path_tools
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.plugin_tools.rst b/docs/source/pype.lib.plugin_tools.rst
deleted file mode 100644
index 6eadc5d3be..0000000000
--- a/docs/source/pype.lib.plugin_tools.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.plugin\_tools module
-=============================
-
-.. automodule:: pype.lib.plugin_tools
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.profiling.rst b/docs/source/pype.lib.profiling.rst
deleted file mode 100644
index 1fded0c8fd..0000000000
--- a/docs/source/pype.lib.profiling.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.profiling module
-=========================
-
-.. automodule:: pype.lib.profiling
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.python_module_tools.rst b/docs/source/pype.lib.python_module_tools.rst
deleted file mode 100644
index c916080bce..0000000000
--- a/docs/source/pype.lib.python_module_tools.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.python\_module\_tools module
-=====================================
-
-.. automodule:: pype.lib.python_module_tools
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.rst b/docs/source/pype.lib.rst
deleted file mode 100644
index ea880eea3e..0000000000
--- a/docs/source/pype.lib.rst
+++ /dev/null
@@ -1,90 +0,0 @@
-pype.lib package
-================
-
-.. automodule:: pype.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.lib.anatomy module
------------------------
-
-.. automodule:: pype.lib.anatomy
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.lib.config module
-----------------------
-
-.. automodule:: pype.lib.config
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.lib.execute module
------------------------
-
-.. automodule:: pype.lib.execute
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.lib.git\_progress module
------------------------------
-
-.. automodule:: pype.lib.git_progress
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.lib.lib module
--------------------
-
-.. automodule:: pype.lib.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.lib.log module
--------------------
-
-.. automodule:: pype.lib.log
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.lib.mongo module
----------------------
-
-.. automodule:: pype.lib.mongo
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.lib.profiling module
--------------------------
-
-.. automodule:: pype.lib.profiling
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.lib.terminal module
-------------------------
-
-.. automodule:: pype.lib.terminal
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.lib.user\_settings module
-------------------------------
-
-.. automodule:: pype.lib.user_settings
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.terminal.rst b/docs/source/pype.lib.terminal.rst
deleted file mode 100644
index dafe1d8f69..0000000000
--- a/docs/source/pype.lib.terminal.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.terminal module
-========================
-
-.. automodule:: pype.lib.terminal
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.terminal_splash.rst b/docs/source/pype.lib.terminal_splash.rst
deleted file mode 100644
index 06038f0f09..0000000000
--- a/docs/source/pype.lib.terminal_splash.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.terminal\_splash module
-================================
-
-.. automodule:: pype.lib.terminal_splash
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.lib.user_settings.rst b/docs/source/pype.lib.user_settings.rst
deleted file mode 100644
index 7b4e8ced78..0000000000
--- a/docs/source/pype.lib.user_settings.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.lib.user\_settings module
-==============================
-
-.. automodule:: pype.lib.user_settings
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.adobe_communicator.adobe_comunicator.rst b/docs/source/pype.modules.adobe_communicator.adobe_comunicator.rst
deleted file mode 100644
index aadbaa0dc5..0000000000
--- a/docs/source/pype.modules.adobe_communicator.adobe_comunicator.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.adobe\_communicator.adobe\_comunicator module
-==========================================================
-
-.. automodule:: pype.modules.adobe_communicator.adobe_comunicator
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.adobe_communicator.lib.publish.rst b/docs/source/pype.modules.adobe_communicator.lib.publish.rst
deleted file mode 100644
index a16bf1dd0a..0000000000
--- a/docs/source/pype.modules.adobe_communicator.lib.publish.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.adobe\_communicator.lib.publish module
-===================================================
-
-.. automodule:: pype.modules.adobe_communicator.lib.publish
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.adobe_communicator.lib.rest_api.rst b/docs/source/pype.modules.adobe_communicator.lib.rest_api.rst
deleted file mode 100644
index 457bebef99..0000000000
--- a/docs/source/pype.modules.adobe_communicator.lib.rest_api.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.adobe\_communicator.lib.rest\_api module
-=====================================================
-
-.. automodule:: pype.modules.adobe_communicator.lib.rest_api
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.adobe_communicator.lib.rst b/docs/source/pype.modules.adobe_communicator.lib.rst
deleted file mode 100644
index cdec4ce80e..0000000000
--- a/docs/source/pype.modules.adobe_communicator.lib.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.modules.adobe\_communicator.lib package
-============================================
-
-.. automodule:: pype.modules.adobe_communicator.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.modules.adobe\_communicator.lib.publish module
----------------------------------------------------
-
-.. automodule:: pype.modules.adobe_communicator.lib.publish
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.adobe\_communicator.lib.rest\_api module
------------------------------------------------------
-
-.. automodule:: pype.modules.adobe_communicator.lib.rest_api
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.adobe_communicator.rst b/docs/source/pype.modules.adobe_communicator.rst
deleted file mode 100644
index f2fa40ced4..0000000000
--- a/docs/source/pype.modules.adobe_communicator.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.modules.adobe\_communicator package
-========================================
-
-.. automodule:: pype.modules.adobe_communicator
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.modules.adobe_communicator.lib
-
-Submodules
-----------
-
-pype.modules.adobe\_communicator.adobe\_comunicator module
-----------------------------------------------------------
-
-.. automodule:: pype.modules.adobe_communicator.adobe_comunicator
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.avalon_apps.avalon_app.rst b/docs/source/pype.modules.avalon_apps.avalon_app.rst
deleted file mode 100644
index 43f467e748..0000000000
--- a/docs/source/pype.modules.avalon_apps.avalon_app.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.avalon\_apps.avalon\_app module
-============================================
-
-.. automodule:: pype.modules.avalon_apps.avalon_app
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.avalon_apps.rest_api.rst b/docs/source/pype.modules.avalon_apps.rest_api.rst
deleted file mode 100644
index d89c979311..0000000000
--- a/docs/source/pype.modules.avalon_apps.rest_api.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.avalon\_apps.rest\_api module
-==========================================
-
-.. automodule:: pype.modules.avalon_apps.rest_api
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.avalon_apps.rst b/docs/source/pype.modules.avalon_apps.rst
deleted file mode 100644
index 4755eddae6..0000000000
--- a/docs/source/pype.modules.avalon_apps.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.modules.avalon\_apps package
-=================================
-
-.. automodule:: pype.modules.avalon_apps
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.modules.avalon\_apps.avalon\_app module
---------------------------------------------
-
-.. automodule:: pype.modules.avalon_apps.avalon_app
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.avalon\_apps.rest\_api module
-------------------------------------------
-
-.. automodule:: pype.modules.avalon_apps.rest_api
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.base.rst b/docs/source/pype.modules.base.rst
deleted file mode 100644
index 7cd3cfbd44..0000000000
--- a/docs/source/pype.modules.base.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.base module
-========================
-
-.. automodule:: pype.modules.base
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.clockify.clockify.rst b/docs/source/pype.modules.clockify.clockify.rst
deleted file mode 100644
index a3deaab81d..0000000000
--- a/docs/source/pype.modules.clockify.clockify.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.clockify.clockify module
-=====================================
-
-.. automodule:: pype.modules.clockify.clockify
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.clockify.clockify_api.rst b/docs/source/pype.modules.clockify.clockify_api.rst
deleted file mode 100644
index 2facc550c5..0000000000
--- a/docs/source/pype.modules.clockify.clockify_api.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.clockify.clockify\_api module
-==========================================
-
-.. automodule:: pype.modules.clockify.clockify_api
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.clockify.clockify_module.rst b/docs/source/pype.modules.clockify.clockify_module.rst
deleted file mode 100644
index 85f8e75ad1..0000000000
--- a/docs/source/pype.modules.clockify.clockify_module.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.clockify.clockify\_module module
-=============================================
-
-.. automodule:: pype.modules.clockify.clockify_module
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.clockify.constants.rst b/docs/source/pype.modules.clockify.constants.rst
deleted file mode 100644
index e30a073bfc..0000000000
--- a/docs/source/pype.modules.clockify.constants.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.clockify.constants module
-======================================
-
-.. automodule:: pype.modules.clockify.constants
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.clockify.rst b/docs/source/pype.modules.clockify.rst
deleted file mode 100644
index 550ba049c2..0000000000
--- a/docs/source/pype.modules.clockify.rst
+++ /dev/null
@@ -1,42 +0,0 @@
-pype.modules.clockify package
-=============================
-
-.. automodule:: pype.modules.clockify
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.modules.clockify.clockify module
--------------------------------------
-
-.. automodule:: pype.modules.clockify.clockify
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.clockify.clockify\_api module
-------------------------------------------
-
-.. automodule:: pype.modules.clockify.clockify_api
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.clockify.constants module
---------------------------------------
-
-.. automodule:: pype.modules.clockify.constants
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.clockify.widgets module
-------------------------------------
-
-.. automodule:: pype.modules.clockify.widgets
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.clockify.widgets.rst b/docs/source/pype.modules.clockify.widgets.rst
deleted file mode 100644
index e9809fb048..0000000000
--- a/docs/source/pype.modules.clockify.widgets.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.clockify.widgets module
-====================================
-
-.. automodule:: pype.modules.clockify.widgets
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.deadline.deadline_module.rst b/docs/source/pype.modules.deadline.deadline_module.rst
deleted file mode 100644
index 43e7198a8b..0000000000
--- a/docs/source/pype.modules.deadline.deadline_module.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.deadline.deadline\_module module
-=============================================
-
-.. automodule:: pype.modules.deadline.deadline_module
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.deadline.rst b/docs/source/pype.modules.deadline.rst
deleted file mode 100644
index 7633b2b950..0000000000
--- a/docs/source/pype.modules.deadline.rst
+++ /dev/null
@@ -1,15 +0,0 @@
-pype.modules.deadline package
-=============================
-
-.. automodule:: pype.modules.deadline
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-.. toctree::
- :maxdepth: 10
-
- pype.modules.deadline.deadline_module
diff --git a/docs/source/pype.modules.ftrack.ftrack_module.rst b/docs/source/pype.modules.ftrack.ftrack_module.rst
deleted file mode 100644
index 4188ffbed8..0000000000
--- a/docs/source/pype.modules.ftrack.ftrack_module.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.ftrack\_module module
-=========================================
-
-.. automodule:: pype.modules.ftrack.ftrack_module
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.ftrack_server.custom_db_connector.rst b/docs/source/pype.modules.ftrack.ftrack_server.custom_db_connector.rst
deleted file mode 100644
index b42c3e054d..0000000000
--- a/docs/source/pype.modules.ftrack.ftrack_server.custom_db_connector.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.ftrack\_server.custom\_db\_connector module
-===============================================================
-
-.. automodule:: pype.modules.ftrack.ftrack_server.custom_db_connector
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.ftrack_server.event_server_cli.rst b/docs/source/pype.modules.ftrack.ftrack_server.event_server_cli.rst
deleted file mode 100644
index d6404f965c..0000000000
--- a/docs/source/pype.modules.ftrack.ftrack_server.event_server_cli.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.ftrack\_server.event\_server\_cli module
-============================================================
-
-.. automodule:: pype.modules.ftrack.ftrack_server.event_server_cli
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.ftrack_server.ftrack_server.rst b/docs/source/pype.modules.ftrack.ftrack_server.ftrack_server.rst
deleted file mode 100644
index af2783c263..0000000000
--- a/docs/source/pype.modules.ftrack.ftrack_server.ftrack_server.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.ftrack\_server.ftrack\_server module
-========================================================
-
-.. automodule:: pype.modules.ftrack.ftrack_server.ftrack_server
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.ftrack_server.lib.rst b/docs/source/pype.modules.ftrack.ftrack_server.lib.rst
deleted file mode 100644
index 2ac4cef517..0000000000
--- a/docs/source/pype.modules.ftrack.ftrack_server.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.ftrack\_server.lib module
-=============================================
-
-.. automodule:: pype.modules.ftrack.ftrack_server.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.ftrack_server.rst b/docs/source/pype.modules.ftrack.ftrack_server.rst
deleted file mode 100644
index 417acc1a45..0000000000
--- a/docs/source/pype.modules.ftrack.ftrack_server.rst
+++ /dev/null
@@ -1,90 +0,0 @@
-pype.modules.ftrack.ftrack\_server package
-==========================================
-
-.. automodule:: pype.modules.ftrack.ftrack_server
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.modules.ftrack.ftrack\_server.custom\_db\_connector module
----------------------------------------------------------------
-
-.. automodule:: pype.modules.ftrack.ftrack_server.custom_db_connector
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.ftrack\_server.event\_server\_cli module
-------------------------------------------------------------
-
-.. automodule:: pype.modules.ftrack.ftrack_server.event_server_cli
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.ftrack\_server.ftrack\_server module
---------------------------------------------------------
-
-.. automodule:: pype.modules.ftrack.ftrack_server.ftrack_server
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.ftrack\_server.lib module
----------------------------------------------
-
-.. automodule:: pype.modules.ftrack.ftrack_server.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.ftrack\_server.socket\_thread module
---------------------------------------------------------
-
-.. automodule:: pype.modules.ftrack.ftrack_server.socket_thread
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.ftrack\_server.sub\_event\_processor module
----------------------------------------------------------------
-
-.. automodule:: pype.modules.ftrack.ftrack_server.sub_event_processor
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.ftrack\_server.sub\_event\_status module
-------------------------------------------------------------
-
-.. automodule:: pype.modules.ftrack.ftrack_server.sub_event_status
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.ftrack\_server.sub\_event\_storer module
-------------------------------------------------------------
-
-.. automodule:: pype.modules.ftrack.ftrack_server.sub_event_storer
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.ftrack\_server.sub\_legacy\_server module
--------------------------------------------------------------
-
-.. automodule:: pype.modules.ftrack.ftrack_server.sub_legacy_server
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.ftrack\_server.sub\_user\_server module
------------------------------------------------------------
-
-.. automodule:: pype.modules.ftrack.ftrack_server.sub_user_server
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.ftrack_server.socket_thread.rst b/docs/source/pype.modules.ftrack.ftrack_server.socket_thread.rst
deleted file mode 100644
index d8d24a8288..0000000000
--- a/docs/source/pype.modules.ftrack.ftrack_server.socket_thread.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.ftrack\_server.socket\_thread module
-========================================================
-
-.. automodule:: pype.modules.ftrack.ftrack_server.socket_thread
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.ftrack_server.sub_event_processor.rst b/docs/source/pype.modules.ftrack.ftrack_server.sub_event_processor.rst
deleted file mode 100644
index 04f863e347..0000000000
--- a/docs/source/pype.modules.ftrack.ftrack_server.sub_event_processor.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.ftrack\_server.sub\_event\_processor module
-===============================================================
-
-.. automodule:: pype.modules.ftrack.ftrack_server.sub_event_processor
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.ftrack_server.sub_event_status.rst b/docs/source/pype.modules.ftrack.ftrack_server.sub_event_status.rst
deleted file mode 100644
index 876b7313cf..0000000000
--- a/docs/source/pype.modules.ftrack.ftrack_server.sub_event_status.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.ftrack\_server.sub\_event\_status module
-============================================================
-
-.. automodule:: pype.modules.ftrack.ftrack_server.sub_event_status
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.ftrack_server.sub_event_storer.rst b/docs/source/pype.modules.ftrack.ftrack_server.sub_event_storer.rst
deleted file mode 100644
index 3d2d400d55..0000000000
--- a/docs/source/pype.modules.ftrack.ftrack_server.sub_event_storer.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.ftrack\_server.sub\_event\_storer module
-============================================================
-
-.. automodule:: pype.modules.ftrack.ftrack_server.sub_event_storer
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.ftrack_server.sub_legacy_server.rst b/docs/source/pype.modules.ftrack.ftrack_server.sub_legacy_server.rst
deleted file mode 100644
index d25cdfe8de..0000000000
--- a/docs/source/pype.modules.ftrack.ftrack_server.sub_legacy_server.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.ftrack\_server.sub\_legacy\_server module
-=============================================================
-
-.. automodule:: pype.modules.ftrack.ftrack_server.sub_legacy_server
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.ftrack_server.sub_user_server.rst b/docs/source/pype.modules.ftrack.ftrack_server.sub_user_server.rst
deleted file mode 100644
index c13095d5f1..0000000000
--- a/docs/source/pype.modules.ftrack.ftrack_server.sub_user_server.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.ftrack\_server.sub\_user\_server module
-===========================================================
-
-.. automodule:: pype.modules.ftrack.ftrack_server.sub_user_server
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.lib.avalon_sync.rst b/docs/source/pype.modules.ftrack.lib.avalon_sync.rst
deleted file mode 100644
index 954ec4d911..0000000000
--- a/docs/source/pype.modules.ftrack.lib.avalon_sync.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.lib.avalon\_sync module
-===========================================
-
-.. automodule:: pype.modules.ftrack.lib.avalon_sync
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.lib.credentials.rst b/docs/source/pype.modules.ftrack.lib.credentials.rst
deleted file mode 100644
index 3965dc406d..0000000000
--- a/docs/source/pype.modules.ftrack.lib.credentials.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.lib.credentials module
-==========================================
-
-.. automodule:: pype.modules.ftrack.lib.credentials
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.lib.ftrack_action_handler.rst b/docs/source/pype.modules.ftrack.lib.ftrack_action_handler.rst
deleted file mode 100644
index cec38f9b8a..0000000000
--- a/docs/source/pype.modules.ftrack.lib.ftrack_action_handler.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.lib.ftrack\_action\_handler module
-======================================================
-
-.. automodule:: pype.modules.ftrack.lib.ftrack_action_handler
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.lib.ftrack_app_handler.rst b/docs/source/pype.modules.ftrack.lib.ftrack_app_handler.rst
deleted file mode 100644
index 1f7395927d..0000000000
--- a/docs/source/pype.modules.ftrack.lib.ftrack_app_handler.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.lib.ftrack\_app\_handler module
-===================================================
-
-.. automodule:: pype.modules.ftrack.lib.ftrack_app_handler
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.lib.ftrack_base_handler.rst b/docs/source/pype.modules.ftrack.lib.ftrack_base_handler.rst
deleted file mode 100644
index 94fab7c940..0000000000
--- a/docs/source/pype.modules.ftrack.lib.ftrack_base_handler.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.lib.ftrack\_base\_handler module
-====================================================
-
-.. automodule:: pype.modules.ftrack.lib.ftrack_base_handler
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.lib.ftrack_event_handler.rst b/docs/source/pype.modules.ftrack.lib.ftrack_event_handler.rst
deleted file mode 100644
index 0b57219b50..0000000000
--- a/docs/source/pype.modules.ftrack.lib.ftrack_event_handler.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.lib.ftrack\_event\_handler module
-=====================================================
-
-.. automodule:: pype.modules.ftrack.lib.ftrack_event_handler
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.lib.rst b/docs/source/pype.modules.ftrack.lib.rst
deleted file mode 100644
index 32a219ab3a..0000000000
--- a/docs/source/pype.modules.ftrack.lib.rst
+++ /dev/null
@@ -1,58 +0,0 @@
-pype.modules.ftrack.lib package
-===============================
-
-.. automodule:: pype.modules.ftrack.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.modules.ftrack.lib.avalon\_sync module
--------------------------------------------
-
-.. automodule:: pype.modules.ftrack.lib.avalon_sync
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.lib.credentials module
-------------------------------------------
-
-.. automodule:: pype.modules.ftrack.lib.credentials
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.lib.ftrack\_action\_handler module
-------------------------------------------------------
-
-.. automodule:: pype.modules.ftrack.lib.ftrack_action_handler
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.lib.ftrack\_app\_handler module
----------------------------------------------------
-
-.. automodule:: pype.modules.ftrack.lib.ftrack_app_handler
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.lib.ftrack\_base\_handler module
-----------------------------------------------------
-
-.. automodule:: pype.modules.ftrack.lib.ftrack_base_handler
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.lib.ftrack\_event\_handler module
------------------------------------------------------
-
-.. automodule:: pype.modules.ftrack.lib.ftrack_event_handler
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.lib.settings.rst b/docs/source/pype.modules.ftrack.lib.settings.rst
deleted file mode 100644
index 255d52178a..0000000000
--- a/docs/source/pype.modules.ftrack.lib.settings.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.lib.settings module
-=======================================
-
-.. automodule:: pype.modules.ftrack.lib.settings
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.rst b/docs/source/pype.modules.ftrack.rst
deleted file mode 100644
index 13a92db808..0000000000
--- a/docs/source/pype.modules.ftrack.rst
+++ /dev/null
@@ -1,17 +0,0 @@
-pype.modules.ftrack package
-===========================
-
-.. automodule:: pype.modules.ftrack
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.modules.ftrack.ftrack_server
- pype.modules.ftrack.lib
- pype.modules.ftrack.tray
diff --git a/docs/source/pype.modules.ftrack.tray.ftrack_module.rst b/docs/source/pype.modules.ftrack.tray.ftrack_module.rst
deleted file mode 100644
index c4a370472c..0000000000
--- a/docs/source/pype.modules.ftrack.tray.ftrack_module.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.tray.ftrack\_module module
-==============================================
-
-.. automodule:: pype.modules.ftrack.tray.ftrack_module
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.tray.ftrack_tray.rst b/docs/source/pype.modules.ftrack.tray.ftrack_tray.rst
deleted file mode 100644
index 147647e9b4..0000000000
--- a/docs/source/pype.modules.ftrack.tray.ftrack_tray.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.tray.ftrack\_tray module
-============================================
-
-.. automodule:: pype.modules.ftrack.tray.ftrack_tray
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.tray.login_dialog.rst b/docs/source/pype.modules.ftrack.tray.login_dialog.rst
deleted file mode 100644
index dabc2e73a7..0000000000
--- a/docs/source/pype.modules.ftrack.tray.login_dialog.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.tray.login\_dialog module
-=============================================
-
-.. automodule:: pype.modules.ftrack.tray.login_dialog
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.tray.login_tools.rst b/docs/source/pype.modules.ftrack.tray.login_tools.rst
deleted file mode 100644
index 00ec690866..0000000000
--- a/docs/source/pype.modules.ftrack.tray.login_tools.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.ftrack.tray.login\_tools module
-============================================
-
-.. automodule:: pype.modules.ftrack.tray.login_tools
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.ftrack.tray.rst b/docs/source/pype.modules.ftrack.tray.rst
deleted file mode 100644
index 79772a9c3b..0000000000
--- a/docs/source/pype.modules.ftrack.tray.rst
+++ /dev/null
@@ -1,34 +0,0 @@
-pype.modules.ftrack.tray package
-================================
-
-.. automodule:: pype.modules.ftrack.tray
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.modules.ftrack.tray.ftrack\_module module
-----------------------------------------------
-
-.. automodule:: pype.modules.ftrack.tray.ftrack_module
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.tray.login\_dialog module
----------------------------------------------
-
-.. automodule:: pype.modules.ftrack.tray.login_dialog
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.ftrack.tray.login\_tools module
---------------------------------------------
-
-.. automodule:: pype.modules.ftrack.tray.login_tools
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.idle_manager.idle_manager.rst b/docs/source/pype.modules.idle_manager.idle_manager.rst
deleted file mode 100644
index 8e93f97e6b..0000000000
--- a/docs/source/pype.modules.idle_manager.idle_manager.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.idle\_manager.idle\_manager module
-===============================================
-
-.. automodule:: pype.modules.idle_manager.idle_manager
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.idle_manager.rst b/docs/source/pype.modules.idle_manager.rst
deleted file mode 100644
index a3f7922999..0000000000
--- a/docs/source/pype.modules.idle_manager.rst
+++ /dev/null
@@ -1,18 +0,0 @@
-pype.modules.idle\_manager package
-==================================
-
-.. automodule:: pype.modules.idle_manager
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.modules.idle\_manager.idle\_manager module
------------------------------------------------
-
-.. automodule:: pype.modules.idle_manager.idle_manager
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.launcher_action.rst b/docs/source/pype.modules.launcher_action.rst
deleted file mode 100644
index a63408e747..0000000000
--- a/docs/source/pype.modules.launcher_action.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.launcher\_action module
-====================================
-
-.. automodule:: pype.modules.launcher_action
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.log_viewer.log_view_module.rst b/docs/source/pype.modules.log_viewer.log_view_module.rst
deleted file mode 100644
index 8d80170a9c..0000000000
--- a/docs/source/pype.modules.log_viewer.log_view_module.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.log\_viewer.log\_view\_module module
-=================================================
-
-.. automodule:: pype.modules.log_viewer.log_view_module
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.log_viewer.rst b/docs/source/pype.modules.log_viewer.rst
deleted file mode 100644
index e275d56086..0000000000
--- a/docs/source/pype.modules.log_viewer.rst
+++ /dev/null
@@ -1,23 +0,0 @@
-pype.modules.log\_viewer package
-================================
-
-.. automodule:: pype.modules.log_viewer
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 10
-
- pype.modules.log_viewer.tray
-
-Submodules
-----------
-
-.. toctree::
- :maxdepth: 10
-
- pype.modules.log_viewer.log_view_module
diff --git a/docs/source/pype.modules.log_viewer.tray.app.rst b/docs/source/pype.modules.log_viewer.tray.app.rst
deleted file mode 100644
index 0948a05594..0000000000
--- a/docs/source/pype.modules.log_viewer.tray.app.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.log\_viewer.tray.app module
-========================================
-
-.. automodule:: pype.modules.log_viewer.tray.app
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.log_viewer.tray.models.rst b/docs/source/pype.modules.log_viewer.tray.models.rst
deleted file mode 100644
index 4da3887600..0000000000
--- a/docs/source/pype.modules.log_viewer.tray.models.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.log\_viewer.tray.models module
-===========================================
-
-.. automodule:: pype.modules.log_viewer.tray.models
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.log_viewer.tray.rst b/docs/source/pype.modules.log_viewer.tray.rst
deleted file mode 100644
index 5f4b92f627..0000000000
--- a/docs/source/pype.modules.log_viewer.tray.rst
+++ /dev/null
@@ -1,17 +0,0 @@
-pype.modules.log\_viewer.tray package
-=====================================
-
-.. automodule:: pype.modules.log_viewer.tray
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-.. toctree::
- :maxdepth: 10
-
- pype.modules.log_viewer.tray.app
- pype.modules.log_viewer.tray.models
- pype.modules.log_viewer.tray.widgets
diff --git a/docs/source/pype.modules.log_viewer.tray.widgets.rst b/docs/source/pype.modules.log_viewer.tray.widgets.rst
deleted file mode 100644
index cb57c96559..0000000000
--- a/docs/source/pype.modules.log_viewer.tray.widgets.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.log\_viewer.tray.widgets module
-============================================
-
-.. automodule:: pype.modules.log_viewer.tray.widgets
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.muster.muster.rst b/docs/source/pype.modules.muster.muster.rst
deleted file mode 100644
index d3ba1e7052..0000000000
--- a/docs/source/pype.modules.muster.muster.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.muster.muster module
-=================================
-
-.. automodule:: pype.modules.muster.muster
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.muster.rst b/docs/source/pype.modules.muster.rst
deleted file mode 100644
index d8d0f762f4..0000000000
--- a/docs/source/pype.modules.muster.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.modules.muster package
-===========================
-
-.. automodule:: pype.modules.muster
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.modules.muster.muster module
----------------------------------
-
-.. automodule:: pype.modules.muster.muster
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.muster.widget\_login module
-----------------------------------------
-
-.. automodule:: pype.modules.muster.widget_login
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.muster.widget_login.rst b/docs/source/pype.modules.muster.widget_login.rst
deleted file mode 100644
index 1c59cec820..0000000000
--- a/docs/source/pype.modules.muster.widget_login.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.muster.widget\_login module
-========================================
-
-.. automodule:: pype.modules.muster.widget_login
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.rest_api.base_class.rst b/docs/source/pype.modules.rest_api.base_class.rst
deleted file mode 100644
index c2a1030a78..0000000000
--- a/docs/source/pype.modules.rest_api.base_class.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.rest\_api.base\_class module
-=========================================
-
-.. automodule:: pype.modules.rest_api.base_class
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.rest_api.lib.exceptions.rst b/docs/source/pype.modules.rest_api.lib.exceptions.rst
deleted file mode 100644
index d755420ad0..0000000000
--- a/docs/source/pype.modules.rest_api.lib.exceptions.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.rest\_api.lib.exceptions module
-============================================
-
-.. automodule:: pype.modules.rest_api.lib.exceptions
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.rest_api.lib.factory.rst b/docs/source/pype.modules.rest_api.lib.factory.rst
deleted file mode 100644
index 2131d1b8da..0000000000
--- a/docs/source/pype.modules.rest_api.lib.factory.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.rest\_api.lib.factory module
-=========================================
-
-.. automodule:: pype.modules.rest_api.lib.factory
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.rest_api.lib.handler.rst b/docs/source/pype.modules.rest_api.lib.handler.rst
deleted file mode 100644
index 6e340daf9b..0000000000
--- a/docs/source/pype.modules.rest_api.lib.handler.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.rest\_api.lib.handler module
-=========================================
-
-.. automodule:: pype.modules.rest_api.lib.handler
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.rest_api.lib.lib.rst b/docs/source/pype.modules.rest_api.lib.lib.rst
deleted file mode 100644
index 19663788e0..0000000000
--- a/docs/source/pype.modules.rest_api.lib.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.rest\_api.lib.lib module
-=====================================
-
-.. automodule:: pype.modules.rest_api.lib.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.rest_api.lib.rst b/docs/source/pype.modules.rest_api.lib.rst
deleted file mode 100644
index ed8288ee73..0000000000
--- a/docs/source/pype.modules.rest_api.lib.rst
+++ /dev/null
@@ -1,42 +0,0 @@
-pype.modules.rest\_api.lib package
-==================================
-
-.. automodule:: pype.modules.rest_api.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.modules.rest\_api.lib.exceptions module
---------------------------------------------
-
-.. automodule:: pype.modules.rest_api.lib.exceptions
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.rest\_api.lib.factory module
------------------------------------------
-
-.. automodule:: pype.modules.rest_api.lib.factory
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.rest\_api.lib.handler module
------------------------------------------
-
-.. automodule:: pype.modules.rest_api.lib.handler
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.rest\_api.lib.lib module
--------------------------------------
-
-.. automodule:: pype.modules.rest_api.lib.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.rest_api.rest_api.rst b/docs/source/pype.modules.rest_api.rest_api.rst
deleted file mode 100644
index e3d951ac9f..0000000000
--- a/docs/source/pype.modules.rest_api.rest_api.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.rest\_api.rest\_api module
-=======================================
-
-.. automodule:: pype.modules.rest_api.rest_api
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.rest_api.rst b/docs/source/pype.modules.rest_api.rst
deleted file mode 100644
index 09c58c84f8..0000000000
--- a/docs/source/pype.modules.rest_api.rst
+++ /dev/null
@@ -1,34 +0,0 @@
-pype.modules.rest\_api package
-==============================
-
-.. automodule:: pype.modules.rest_api
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.modules.rest_api.lib
-
-Submodules
-----------
-
-pype.modules.rest\_api.base\_class module
------------------------------------------
-
-.. automodule:: pype.modules.rest_api.base_class
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.rest\_api.rest\_api module
----------------------------------------
-
-.. automodule:: pype.modules.rest_api.rest_api
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.rst b/docs/source/pype.modules.rst
deleted file mode 100644
index 148c2084b4..0000000000
--- a/docs/source/pype.modules.rst
+++ /dev/null
@@ -1,36 +0,0 @@
-pype.modules package
-====================
-
-.. automodule:: pype.modules
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.modules.adobe_communicator
- pype.modules.avalon_apps
- pype.modules.clockify
- pype.modules.ftrack
- pype.modules.idle_manager
- pype.modules.muster
- pype.modules.rest_api
- pype.modules.standalonepublish
- pype.modules.timers_manager
- pype.modules.user
- pype.modules.websocket_server
-
-Submodules
-----------
-
-pype.modules.base module
-------------------------
-
-.. automodule:: pype.modules.base
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.settings_action.rst b/docs/source/pype.modules.settings_action.rst
deleted file mode 100644
index 10f0881ced..0000000000
--- a/docs/source/pype.modules.settings_action.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.settings\_action module
-====================================
-
-.. automodule:: pype.modules.settings_action
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.standalonepublish.rst b/docs/source/pype.modules.standalonepublish.rst
deleted file mode 100644
index 2ed366af5c..0000000000
--- a/docs/source/pype.modules.standalonepublish.rst
+++ /dev/null
@@ -1,18 +0,0 @@
-pype.modules.standalonepublish package
-======================================
-
-.. automodule:: pype.modules.standalonepublish
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.modules.standalonepublish.standalonepublish\_module module
----------------------------------------------------------------
-
-.. automodule:: pype.modules.standalonepublish.standalonepublish_module
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.standalonepublish.standalonepublish_module.rst b/docs/source/pype.modules.standalonepublish.standalonepublish_module.rst
deleted file mode 100644
index a78826a4b4..0000000000
--- a/docs/source/pype.modules.standalonepublish.standalonepublish_module.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.standalonepublish.standalonepublish\_module module
-===============================================================
-
-.. automodule:: pype.modules.standalonepublish.standalonepublish_module
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.standalonepublish_action.rst b/docs/source/pype.modules.standalonepublish_action.rst
deleted file mode 100644
index d51dbcefa0..0000000000
--- a/docs/source/pype.modules.standalonepublish_action.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.standalonepublish\_action module
-=============================================
-
-.. automodule:: pype.modules.standalonepublish_action
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.sync_server.rst b/docs/source/pype.modules.sync_server.rst
deleted file mode 100644
index a26dc7e212..0000000000
--- a/docs/source/pype.modules.sync_server.rst
+++ /dev/null
@@ -1,16 +0,0 @@
-pype.modules.sync\_server package
-=================================
-
-.. automodule:: pype.modules.sync_server
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-.. toctree::
- :maxdepth: 10
-
- pype.modules.sync_server.sync_server
- pype.modules.sync_server.utils
diff --git a/docs/source/pype.modules.sync_server.sync_server.rst b/docs/source/pype.modules.sync_server.sync_server.rst
deleted file mode 100644
index 36d6aa68ed..0000000000
--- a/docs/source/pype.modules.sync_server.sync_server.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.sync\_server.sync\_server module
-=============================================
-
-.. automodule:: pype.modules.sync_server.sync_server
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.sync_server.utils.rst b/docs/source/pype.modules.sync_server.utils.rst
deleted file mode 100644
index 325d5e435d..0000000000
--- a/docs/source/pype.modules.sync_server.utils.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.sync\_server.utils module
-======================================
-
-.. automodule:: pype.modules.sync_server.utils
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.timers_manager.rst b/docs/source/pype.modules.timers_manager.rst
deleted file mode 100644
index 6c971e9dc1..0000000000
--- a/docs/source/pype.modules.timers_manager.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.modules.timers\_manager package
-====================================
-
-.. automodule:: pype.modules.timers_manager
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.modules.timers\_manager.timers\_manager module
----------------------------------------------------
-
-.. automodule:: pype.modules.timers_manager.timers_manager
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.timers\_manager.widget\_user\_idle module
-------------------------------------------------------
-
-.. automodule:: pype.modules.timers_manager.widget_user_idle
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.timers_manager.timers_manager.rst b/docs/source/pype.modules.timers_manager.timers_manager.rst
deleted file mode 100644
index fe18e4d15c..0000000000
--- a/docs/source/pype.modules.timers_manager.timers_manager.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.timers\_manager.timers\_manager module
-===================================================
-
-.. automodule:: pype.modules.timers_manager.timers_manager
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.timers_manager.widget_user_idle.rst b/docs/source/pype.modules.timers_manager.widget_user_idle.rst
deleted file mode 100644
index b072879c7a..0000000000
--- a/docs/source/pype.modules.timers_manager.widget_user_idle.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.timers\_manager.widget\_user\_idle module
-======================================================
-
-.. automodule:: pype.modules.timers_manager.widget_user_idle
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.user.rst b/docs/source/pype.modules.user.rst
deleted file mode 100644
index d181b263e5..0000000000
--- a/docs/source/pype.modules.user.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.modules.user package
-=========================
-
-.. automodule:: pype.modules.user
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.modules.user.user\_module module
--------------------------------------
-
-.. automodule:: pype.modules.user.user_module
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.user.widget\_user module
--------------------------------------
-
-.. automodule:: pype.modules.user.widget_user
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.user.user_module.rst b/docs/source/pype.modules.user.user_module.rst
deleted file mode 100644
index a8e0cd6bad..0000000000
--- a/docs/source/pype.modules.user.user_module.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.user.user\_module module
-=====================================
-
-.. automodule:: pype.modules.user.user_module
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.user.widget_user.rst b/docs/source/pype.modules.user.widget_user.rst
deleted file mode 100644
index 2979e5ead4..0000000000
--- a/docs/source/pype.modules.user.widget_user.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.user.widget\_user module
-=====================================
-
-.. automodule:: pype.modules.user.widget_user
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.websocket_server.hosts.aftereffects.rst b/docs/source/pype.modules.websocket_server.hosts.aftereffects.rst
deleted file mode 100644
index 9f4720ae14..0000000000
--- a/docs/source/pype.modules.websocket_server.hosts.aftereffects.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.websocket\_server.hosts.aftereffects module
-========================================================
-
-.. automodule:: pype.modules.websocket_server.hosts.aftereffects
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.websocket_server.hosts.external_app_1.rst b/docs/source/pype.modules.websocket_server.hosts.external_app_1.rst
deleted file mode 100644
index 4ac69d9015..0000000000
--- a/docs/source/pype.modules.websocket_server.hosts.external_app_1.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.websocket\_server.hosts.external\_app\_1 module
-============================================================
-
-.. automodule:: pype.modules.websocket_server.hosts.external_app_1
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.websocket_server.hosts.photoshop.rst b/docs/source/pype.modules.websocket_server.hosts.photoshop.rst
deleted file mode 100644
index cbda61275a..0000000000
--- a/docs/source/pype.modules.websocket_server.hosts.photoshop.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.websocket\_server.hosts.photoshop module
-=====================================================
-
-.. automodule:: pype.modules.websocket_server.hosts.photoshop
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.websocket_server.hosts.rst b/docs/source/pype.modules.websocket_server.hosts.rst
deleted file mode 100644
index d5ce7c3f8e..0000000000
--- a/docs/source/pype.modules.websocket_server.hosts.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.modules.websocket\_server.hosts package
-============================================
-
-.. automodule:: pype.modules.websocket_server.hosts
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.modules.websocket\_server.hosts.external\_app\_1 module
-------------------------------------------------------------
-
-.. automodule:: pype.modules.websocket_server.hosts.external_app_1
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules.websocket\_server.hosts.photoshop module
------------------------------------------------------
-
-.. automodule:: pype.modules.websocket_server.hosts.photoshop
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.websocket_server.rst b/docs/source/pype.modules.websocket_server.rst
deleted file mode 100644
index a83d371df1..0000000000
--- a/docs/source/pype.modules.websocket_server.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.modules.websocket\_server package
-======================================
-
-.. automodule:: pype.modules.websocket_server
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.modules.websocket_server.hosts
-
-Submodules
-----------
-
-pype.modules.websocket\_server.websocket\_server module
--------------------------------------------------------
-
-.. automodule:: pype.modules.websocket_server.websocket_server
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules.websocket_server.websocket_server.rst b/docs/source/pype.modules.websocket_server.websocket_server.rst
deleted file mode 100644
index 354c9e6cf9..0000000000
--- a/docs/source/pype.modules.websocket_server.websocket_server.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules.websocket\_server.websocket\_server module
-=======================================================
-
-.. automodule:: pype.modules.websocket_server.websocket_server
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.modules_manager.rst b/docs/source/pype.modules_manager.rst
deleted file mode 100644
index a5f2327d65..0000000000
--- a/docs/source/pype.modules_manager.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.modules\_manager module
-============================
-
-.. automodule:: pype.modules_manager
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugin.rst b/docs/source/pype.plugin.rst
deleted file mode 100644
index c20bb77b2b..0000000000
--- a/docs/source/pype.plugin.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugin module
-==================
-
-.. automodule:: pype.plugin
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_animation.rst b/docs/source/pype.plugins.maya.publish.collect_animation.rst
deleted file mode 100644
index 497c497057..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_animation.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_animation module
-===================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_animation
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_ass.rst b/docs/source/pype.plugins.maya.publish.collect_ass.rst
deleted file mode 100644
index a44e61ce98..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_ass.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_ass module
-=============================================
-
-.. automodule:: pype.plugins.maya.publish.collect_ass
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_assembly.rst b/docs/source/pype.plugins.maya.publish.collect_assembly.rst
deleted file mode 100644
index 5baa91818b..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_assembly.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_assembly module
-==================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_assembly
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_file_dependencies.rst b/docs/source/pype.plugins.maya.publish.collect_file_dependencies.rst
deleted file mode 100644
index efe857140e..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_file_dependencies.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_file\_dependencies module
-============================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_file_dependencies
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_ftrack_family.rst b/docs/source/pype.plugins.maya.publish.collect_ftrack_family.rst
deleted file mode 100644
index 872bbc69a4..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_ftrack_family.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_ftrack\_family module
-========================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_ftrack_family
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_history.rst b/docs/source/pype.plugins.maya.publish.collect_history.rst
deleted file mode 100644
index 5a98778c24..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_history.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_history module
-=================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_history
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_instances.rst b/docs/source/pype.plugins.maya.publish.collect_instances.rst
deleted file mode 100644
index 33c8b97597..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_instances.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_instances module
-===================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_instances
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_look.rst b/docs/source/pype.plugins.maya.publish.collect_look.rst
deleted file mode 100644
index 234fcf20d1..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_look.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_look module
-==============================================
-
-.. automodule:: pype.plugins.maya.publish.collect_look
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_maya_units.rst b/docs/source/pype.plugins.maya.publish.collect_maya_units.rst
deleted file mode 100644
index 0cb01b0fa7..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_maya_units.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_maya\_units module
-=====================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_maya_units
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_maya_workspace.rst b/docs/source/pype.plugins.maya.publish.collect_maya_workspace.rst
deleted file mode 100644
index 7447052004..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_maya_workspace.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_maya\_workspace module
-=========================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_maya_workspace
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_mayaascii.rst b/docs/source/pype.plugins.maya.publish.collect_mayaascii.rst
deleted file mode 100644
index 14fe826229..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_mayaascii.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_mayaascii module
-===================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_mayaascii
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_model.rst b/docs/source/pype.plugins.maya.publish.collect_model.rst
deleted file mode 100644
index b30bf3fb22..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_model.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_model module
-===============================================
-
-.. automodule:: pype.plugins.maya.publish.collect_model
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_remove_marked.rst b/docs/source/pype.plugins.maya.publish.collect_remove_marked.rst
deleted file mode 100644
index a0bf9498d7..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_remove_marked.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_remove\_marked module
-========================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_remove_marked
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_render.rst b/docs/source/pype.plugins.maya.publish.collect_render.rst
deleted file mode 100644
index 6de8827119..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_render.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_render module
-================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_render
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_render_layer_aovs.rst b/docs/source/pype.plugins.maya.publish.collect_render_layer_aovs.rst
deleted file mode 100644
index ab511fc5dd..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_render_layer_aovs.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_render\_layer\_aovs module
-=============================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_render_layer_aovs
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_renderable_camera.rst b/docs/source/pype.plugins.maya.publish.collect_renderable_camera.rst
deleted file mode 100644
index c98e8000a1..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_renderable_camera.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_renderable\_camera module
-============================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_renderable_camera
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_review.rst b/docs/source/pype.plugins.maya.publish.collect_review.rst
deleted file mode 100644
index d73127aa85..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_review.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_review module
-================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_review
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_rig.rst b/docs/source/pype.plugins.maya.publish.collect_rig.rst
deleted file mode 100644
index e7c0528482..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_rig.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_rig module
-=============================================
-
-.. automodule:: pype.plugins.maya.publish.collect_rig
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_scene.rst b/docs/source/pype.plugins.maya.publish.collect_scene.rst
deleted file mode 100644
index c5c2fef222..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_scene.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_scene module
-===============================================
-
-.. automodule:: pype.plugins.maya.publish.collect_scene
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_unreal_staticmesh.rst b/docs/source/pype.plugins.maya.publish.collect_unreal_staticmesh.rst
deleted file mode 100644
index 673f0865fd..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_unreal_staticmesh.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_unreal\_staticmesh module
-============================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_unreal_staticmesh
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_workscene_fps.rst b/docs/source/pype.plugins.maya.publish.collect_workscene_fps.rst
deleted file mode 100644
index ed4386a7ba..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_workscene_fps.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_workscene\_fps module
-========================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_workscene_fps
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_yeti_cache.rst b/docs/source/pype.plugins.maya.publish.collect_yeti_cache.rst
deleted file mode 100644
index 32ab50baca..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_yeti_cache.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_yeti\_cache module
-=====================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_yeti_cache
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.collect_yeti_rig.rst b/docs/source/pype.plugins.maya.publish.collect_yeti_rig.rst
deleted file mode 100644
index 8cf968b7c5..0000000000
--- a/docs/source/pype.plugins.maya.publish.collect_yeti_rig.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.collect\_yeti\_rig module
-===================================================
-
-.. automodule:: pype.plugins.maya.publish.collect_yeti_rig
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.determine_future_version.rst b/docs/source/pype.plugins.maya.publish.determine_future_version.rst
deleted file mode 100644
index 55c6155680..0000000000
--- a/docs/source/pype.plugins.maya.publish.determine_future_version.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.determine\_future\_version module
-===========================================================
-
-.. automodule:: pype.plugins.maya.publish.determine_future_version
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_animation.rst b/docs/source/pype.plugins.maya.publish.extract_animation.rst
deleted file mode 100644
index 3649723042..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_animation.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_animation module
-===================================================
-
-.. automodule:: pype.plugins.maya.publish.extract_animation
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_ass.rst b/docs/source/pype.plugins.maya.publish.extract_ass.rst
deleted file mode 100644
index be8123e5d7..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_ass.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_ass module
-=============================================
-
-.. automodule:: pype.plugins.maya.publish.extract_ass
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_assembly.rst b/docs/source/pype.plugins.maya.publish.extract_assembly.rst
deleted file mode 100644
index b36e8f6d30..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_assembly.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_assembly module
-==================================================
-
-.. automodule:: pype.plugins.maya.publish.extract_assembly
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_assproxy.rst b/docs/source/pype.plugins.maya.publish.extract_assproxy.rst
deleted file mode 100644
index fc97a2ee46..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_assproxy.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_assproxy module
-==================================================
-
-.. automodule:: pype.plugins.maya.publish.extract_assproxy
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_camera_alembic.rst b/docs/source/pype.plugins.maya.publish.extract_camera_alembic.rst
deleted file mode 100644
index a9df3da011..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_camera_alembic.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_camera\_alembic module
-=========================================================
-
-.. automodule:: pype.plugins.maya.publish.extract_camera_alembic
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_camera_mayaScene.rst b/docs/source/pype.plugins.maya.publish.extract_camera_mayaScene.rst
deleted file mode 100644
index db1799f52f..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_camera_mayaScene.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_camera\_mayaScene module
-===========================================================
-
-.. automodule:: pype.plugins.maya.publish.extract_camera_mayaScene
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_fbx.rst b/docs/source/pype.plugins.maya.publish.extract_fbx.rst
deleted file mode 100644
index fffd5a6394..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_fbx.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_fbx module
-=============================================
-
-.. automodule:: pype.plugins.maya.publish.extract_fbx
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_look.rst b/docs/source/pype.plugins.maya.publish.extract_look.rst
deleted file mode 100644
index f2708678ce..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_look.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_look module
-==============================================
-
-.. automodule:: pype.plugins.maya.publish.extract_look
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_maya_scene_raw.rst b/docs/source/pype.plugins.maya.publish.extract_maya_scene_raw.rst
deleted file mode 100644
index 1e080dd0eb..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_maya_scene_raw.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_maya\_scene\_raw module
-==========================================================
-
-.. automodule:: pype.plugins.maya.publish.extract_maya_scene_raw
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_model.rst b/docs/source/pype.plugins.maya.publish.extract_model.rst
deleted file mode 100644
index c78b49c777..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_model.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_model module
-===============================================
-
-.. automodule:: pype.plugins.maya.publish.extract_model
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_playblast.rst b/docs/source/pype.plugins.maya.publish.extract_playblast.rst
deleted file mode 100644
index 1aa284b370..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_playblast.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_playblast module
-===================================================
-
-.. automodule:: pype.plugins.maya.publish.extract_playblast
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_pointcache.rst b/docs/source/pype.plugins.maya.publish.extract_pointcache.rst
deleted file mode 100644
index 97ebde4933..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_pointcache.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_pointcache module
-====================================================
-
-.. automodule:: pype.plugins.maya.publish.extract_pointcache
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_rendersetup.rst b/docs/source/pype.plugins.maya.publish.extract_rendersetup.rst
deleted file mode 100644
index 86cb178f42..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_rendersetup.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_rendersetup module
-=====================================================
-
-.. automodule:: pype.plugins.maya.publish.extract_rendersetup
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_rig.rst b/docs/source/pype.plugins.maya.publish.extract_rig.rst
deleted file mode 100644
index f6419c9473..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_rig.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_rig module
-=============================================
-
-.. automodule:: pype.plugins.maya.publish.extract_rig
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_thumbnail.rst b/docs/source/pype.plugins.maya.publish.extract_thumbnail.rst
deleted file mode 100644
index 2d03e11d55..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_thumbnail.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_thumbnail module
-===================================================
-
-.. automodule:: pype.plugins.maya.publish.extract_thumbnail
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_vrayproxy.rst b/docs/source/pype.plugins.maya.publish.extract_vrayproxy.rst
deleted file mode 100644
index 5439ff59ca..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_vrayproxy.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_vrayproxy module
-===================================================
-
-.. automodule:: pype.plugins.maya.publish.extract_vrayproxy
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_yeti_cache.rst b/docs/source/pype.plugins.maya.publish.extract_yeti_cache.rst
deleted file mode 100644
index 7ad84dfc70..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_yeti_cache.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_yeti\_cache module
-=====================================================
-
-.. automodule:: pype.plugins.maya.publish.extract_yeti_cache
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.extract_yeti_rig.rst b/docs/source/pype.plugins.maya.publish.extract_yeti_rig.rst
deleted file mode 100644
index 76d483d91b..0000000000
--- a/docs/source/pype.plugins.maya.publish.extract_yeti_rig.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.extract\_yeti\_rig module
-===================================================
-
-.. automodule:: pype.plugins.maya.publish.extract_yeti_rig
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.increment_current_file_deadline.rst b/docs/source/pype.plugins.maya.publish.increment_current_file_deadline.rst
deleted file mode 100644
index 97126a6c77..0000000000
--- a/docs/source/pype.plugins.maya.publish.increment_current_file_deadline.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.increment\_current\_file\_deadline module
-===================================================================
-
-.. automodule:: pype.plugins.maya.publish.increment_current_file_deadline
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.rst b/docs/source/pype.plugins.maya.publish.rst
deleted file mode 100644
index dba0a9118c..0000000000
--- a/docs/source/pype.plugins.maya.publish.rst
+++ /dev/null
@@ -1,146 +0,0 @@
-pype.plugins.maya.publish package
-=================================
-
-.. automodule:: pype.plugins.maya.publish
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-.. toctree::
- :maxdepth: 10
-
- pype.plugins.maya.publish.collect_animation
- pype.plugins.maya.publish.collect_ass
- pype.plugins.maya.publish.collect_assembly
- pype.plugins.maya.publish.collect_file_dependencies
- pype.plugins.maya.publish.collect_ftrack_family
- pype.plugins.maya.publish.collect_history
- pype.plugins.maya.publish.collect_instances
- pype.plugins.maya.publish.collect_look
- pype.plugins.maya.publish.collect_maya_units
- pype.plugins.maya.publish.collect_maya_workspace
- pype.plugins.maya.publish.collect_mayaascii
- pype.plugins.maya.publish.collect_model
- pype.plugins.maya.publish.collect_remove_marked
- pype.plugins.maya.publish.collect_render
- pype.plugins.maya.publish.collect_render_layer_aovs
- pype.plugins.maya.publish.collect_renderable_camera
- pype.plugins.maya.publish.collect_review
- pype.plugins.maya.publish.collect_rig
- pype.plugins.maya.publish.collect_scene
- pype.plugins.maya.publish.collect_unreal_staticmesh
- pype.plugins.maya.publish.collect_workscene_fps
- pype.plugins.maya.publish.collect_yeti_cache
- pype.plugins.maya.publish.collect_yeti_rig
- pype.plugins.maya.publish.determine_future_version
- pype.plugins.maya.publish.extract_animation
- pype.plugins.maya.publish.extract_ass
- pype.plugins.maya.publish.extract_assembly
- pype.plugins.maya.publish.extract_assproxy
- pype.plugins.maya.publish.extract_camera_alembic
- pype.plugins.maya.publish.extract_camera_mayaScene
- pype.plugins.maya.publish.extract_fbx
- pype.plugins.maya.publish.extract_look
- pype.plugins.maya.publish.extract_maya_scene_raw
- pype.plugins.maya.publish.extract_model
- pype.plugins.maya.publish.extract_playblast
- pype.plugins.maya.publish.extract_pointcache
- pype.plugins.maya.publish.extract_rendersetup
- pype.plugins.maya.publish.extract_rig
- pype.plugins.maya.publish.extract_thumbnail
- pype.plugins.maya.publish.extract_vrayproxy
- pype.plugins.maya.publish.extract_yeti_cache
- pype.plugins.maya.publish.extract_yeti_rig
- pype.plugins.maya.publish.increment_current_file_deadline
- pype.plugins.maya.publish.save_scene
- pype.plugins.maya.publish.submit_maya_deadline
- pype.plugins.maya.publish.submit_maya_muster
- pype.plugins.maya.publish.validate_animation_content
- pype.plugins.maya.publish.validate_animation_out_set_related_node_ids
- pype.plugins.maya.publish.validate_ass_relative_paths
- pype.plugins.maya.publish.validate_assembly_name
- pype.plugins.maya.publish.validate_assembly_namespaces
- pype.plugins.maya.publish.validate_assembly_transforms
- pype.plugins.maya.publish.validate_attributes
- pype.plugins.maya.publish.validate_camera_attributes
- pype.plugins.maya.publish.validate_camera_contents
- pype.plugins.maya.publish.validate_color_sets
- pype.plugins.maya.publish.validate_current_renderlayer_renderable
- pype.plugins.maya.publish.validate_deadline_connection
- pype.plugins.maya.publish.validate_frame_range
- pype.plugins.maya.publish.validate_instance_has_members
- pype.plugins.maya.publish.validate_instance_subset
- pype.plugins.maya.publish.validate_instancer_content
- pype.plugins.maya.publish.validate_instancer_frame_ranges
- pype.plugins.maya.publish.validate_joints_hidden
- pype.plugins.maya.publish.validate_look_contents
- pype.plugins.maya.publish.validate_look_default_shaders_connections
- pype.plugins.maya.publish.validate_look_id_reference_edits
- pype.plugins.maya.publish.validate_look_members_unique
- pype.plugins.maya.publish.validate_look_no_default_shaders
- pype.plugins.maya.publish.validate_look_sets
- pype.plugins.maya.publish.validate_look_shading_group
- pype.plugins.maya.publish.validate_look_single_shader
- pype.plugins.maya.publish.validate_maya_units
- pype.plugins.maya.publish.validate_mesh_arnold_attributes
- pype.plugins.maya.publish.validate_mesh_has_uv
- pype.plugins.maya.publish.validate_mesh_lamina_faces
- pype.plugins.maya.publish.validate_mesh_no_negative_scale
- pype.plugins.maya.publish.validate_mesh_non_manifold
- pype.plugins.maya.publish.validate_mesh_non_zero_edge
- pype.plugins.maya.publish.validate_mesh_normals_unlocked
- pype.plugins.maya.publish.validate_mesh_overlapping_uvs
- pype.plugins.maya.publish.validate_mesh_shader_connections
- pype.plugins.maya.publish.validate_mesh_single_uv_set
- pype.plugins.maya.publish.validate_mesh_uv_set_map1
- pype.plugins.maya.publish.validate_mesh_vertices_have_edges
- pype.plugins.maya.publish.validate_model_content
- pype.plugins.maya.publish.validate_model_name
- pype.plugins.maya.publish.validate_muster_connection
- pype.plugins.maya.publish.validate_no_animation
- pype.plugins.maya.publish.validate_no_default_camera
- pype.plugins.maya.publish.validate_no_namespace
- pype.plugins.maya.publish.validate_no_null_transforms
- pype.plugins.maya.publish.validate_no_unknown_nodes
- pype.plugins.maya.publish.validate_no_vraymesh
- pype.plugins.maya.publish.validate_node_ids
- pype.plugins.maya.publish.validate_node_ids_deformed_shapes
- pype.plugins.maya.publish.validate_node_ids_in_database
- pype.plugins.maya.publish.validate_node_ids_related
- pype.plugins.maya.publish.validate_node_ids_unique
- pype.plugins.maya.publish.validate_node_no_ghosting
- pype.plugins.maya.publish.validate_render_image_rule
- pype.plugins.maya.publish.validate_render_no_default_cameras
- pype.plugins.maya.publish.validate_render_single_camera
- pype.plugins.maya.publish.validate_renderlayer_aovs
- pype.plugins.maya.publish.validate_rendersettings
- pype.plugins.maya.publish.validate_resources
- pype.plugins.maya.publish.validate_rig_contents
- pype.plugins.maya.publish.validate_rig_controllers
- pype.plugins.maya.publish.validate_rig_controllers_arnold_attributes
- pype.plugins.maya.publish.validate_rig_out_set_node_ids
- pype.plugins.maya.publish.validate_rig_output_ids
- pype.plugins.maya.publish.validate_scene_set_workspace
- pype.plugins.maya.publish.validate_shader_name
- pype.plugins.maya.publish.validate_shape_default_names
- pype.plugins.maya.publish.validate_shape_render_stats
- pype.plugins.maya.publish.validate_single_assembly
- pype.plugins.maya.publish.validate_skinCluster_deformer_set
- pype.plugins.maya.publish.validate_step_size
- pype.plugins.maya.publish.validate_transform_naming_suffix
- pype.plugins.maya.publish.validate_transform_zero
- pype.plugins.maya.publish.validate_unicode_strings
- pype.plugins.maya.publish.validate_unreal_mesh_triangulated
- pype.plugins.maya.publish.validate_unreal_staticmesh_naming
- pype.plugins.maya.publish.validate_unreal_up_axis
- pype.plugins.maya.publish.validate_vray_distributed_rendering
- pype.plugins.maya.publish.validate_vray_translator_settings
- pype.plugins.maya.publish.validate_vrayproxy
- pype.plugins.maya.publish.validate_vrayproxy_members
- pype.plugins.maya.publish.validate_yeti_renderscript_callbacks
- pype.plugins.maya.publish.validate_yeti_rig_cache_state
- pype.plugins.maya.publish.validate_yeti_rig_input_in_instance
- pype.plugins.maya.publish.validate_yeti_rig_settings
diff --git a/docs/source/pype.plugins.maya.publish.save_scene.rst b/docs/source/pype.plugins.maya.publish.save_scene.rst
deleted file mode 100644
index 2537bca03d..0000000000
--- a/docs/source/pype.plugins.maya.publish.save_scene.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.save\_scene module
-============================================
-
-.. automodule:: pype.plugins.maya.publish.save_scene
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.submit_maya_deadline.rst b/docs/source/pype.plugins.maya.publish.submit_maya_deadline.rst
deleted file mode 100644
index 0e521cec4e..0000000000
--- a/docs/source/pype.plugins.maya.publish.submit_maya_deadline.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.submit\_maya\_deadline module
-=======================================================
-
-.. automodule:: pype.plugins.maya.publish.submit_maya_deadline
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.submit_maya_muster.rst b/docs/source/pype.plugins.maya.publish.submit_maya_muster.rst
deleted file mode 100644
index 4ae263e157..0000000000
--- a/docs/source/pype.plugins.maya.publish.submit_maya_muster.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.submit\_maya\_muster module
-=====================================================
-
-.. automodule:: pype.plugins.maya.publish.submit_maya_muster
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_animation_content.rst b/docs/source/pype.plugins.maya.publish.validate_animation_content.rst
deleted file mode 100644
index 65191bb957..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_animation_content.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_animation\_content module
-=============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_animation_content
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_animation_out_set_related_node_ids.rst b/docs/source/pype.plugins.maya.publish.validate_animation_out_set_related_node_ids.rst
deleted file mode 100644
index ea289e84ed..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_animation_out_set_related_node_ids.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_animation\_out\_set\_related\_node\_ids module
-==================================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_animation_out_set_related_node_ids
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_ass_relative_paths.rst b/docs/source/pype.plugins.maya.publish.validate_ass_relative_paths.rst
deleted file mode 100644
index f35ef916cc..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_ass_relative_paths.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_ass\_relative\_paths module
-===============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_ass_relative_paths
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_assembly_name.rst b/docs/source/pype.plugins.maya.publish.validate_assembly_name.rst
deleted file mode 100644
index c8178226b2..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_assembly_name.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_assembly\_name module
-=========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_assembly_name
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_assembly_namespaces.rst b/docs/source/pype.plugins.maya.publish.validate_assembly_namespaces.rst
deleted file mode 100644
index 847b90281e..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_assembly_namespaces.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_assembly\_namespaces module
-===============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_assembly_namespaces
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_assembly_transforms.rst b/docs/source/pype.plugins.maya.publish.validate_assembly_transforms.rst
deleted file mode 100644
index b4348a2908..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_assembly_transforms.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_assembly\_transforms module
-===============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_assembly_transforms
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_attributes.rst b/docs/source/pype.plugins.maya.publish.validate_attributes.rst
deleted file mode 100644
index 862820a7c0..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_attributes.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_attributes module
-=====================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_attributes
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_camera_attributes.rst b/docs/source/pype.plugins.maya.publish.validate_camera_attributes.rst
deleted file mode 100644
index 054198f812..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_camera_attributes.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_camera\_attributes module
-=============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_camera_attributes
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_camera_contents.rst b/docs/source/pype.plugins.maya.publish.validate_camera_contents.rst
deleted file mode 100644
index 9cf6604f7a..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_camera_contents.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_camera\_contents module
-===========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_camera_contents
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_color_sets.rst b/docs/source/pype.plugins.maya.publish.validate_color_sets.rst
deleted file mode 100644
index 59bb5607bf..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_color_sets.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_color\_sets module
-======================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_color_sets
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_current_renderlayer_renderable.rst b/docs/source/pype.plugins.maya.publish.validate_current_renderlayer_renderable.rst
deleted file mode 100644
index 31c52477aa..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_current_renderlayer_renderable.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_current\_renderlayer\_renderable module
-===========================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_current_renderlayer_renderable
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_deadline_connection.rst b/docs/source/pype.plugins.maya.publish.validate_deadline_connection.rst
deleted file mode 100644
index 3f8c4b6313..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_deadline_connection.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_deadline\_connection module
-===============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_deadline_connection
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_frame_range.rst b/docs/source/pype.plugins.maya.publish.validate_frame_range.rst
deleted file mode 100644
index 0ccc8ed1cd..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_frame_range.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_frame\_range module
-=======================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_frame_range
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_instance_has_members.rst b/docs/source/pype.plugins.maya.publish.validate_instance_has_members.rst
deleted file mode 100644
index 862d32f114..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_instance_has_members.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_instance\_has\_members module
-=================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_instance_has_members
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_instance_subset.rst b/docs/source/pype.plugins.maya.publish.validate_instance_subset.rst
deleted file mode 100644
index f71febb73c..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_instance_subset.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_instance\_subset module
-===========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_instance_subset
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_instancer_content.rst b/docs/source/pype.plugins.maya.publish.validate_instancer_content.rst
deleted file mode 100644
index 761889dd4d..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_instancer_content.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_instancer\_content module
-=============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_instancer_content
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_instancer_frame_ranges.rst b/docs/source/pype.plugins.maya.publish.validate_instancer_frame_ranges.rst
deleted file mode 100644
index 85338c3e2d..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_instancer_frame_ranges.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_instancer\_frame\_ranges module
-===================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_instancer_frame_ranges
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_joints_hidden.rst b/docs/source/pype.plugins.maya.publish.validate_joints_hidden.rst
deleted file mode 100644
index ede5af0c67..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_joints_hidden.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_joints\_hidden module
-=========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_joints_hidden
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_look_contents.rst b/docs/source/pype.plugins.maya.publish.validate_look_contents.rst
deleted file mode 100644
index 946f924fb3..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_look_contents.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_look\_contents module
-=========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_look_contents
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_look_default_shaders_connections.rst b/docs/source/pype.plugins.maya.publish.validate_look_default_shaders_connections.rst
deleted file mode 100644
index e293cfc0f1..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_look_default_shaders_connections.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_look\_default\_shaders\_connections module
-==============================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_look_default_shaders_connections
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_look_id_reference_edits.rst b/docs/source/pype.plugins.maya.publish.validate_look_id_reference_edits.rst
deleted file mode 100644
index 007f4e2d03..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_look_id_reference_edits.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_look\_id\_reference\_edits module
-=====================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_look_id_reference_edits
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_look_members_unique.rst b/docs/source/pype.plugins.maya.publish.validate_look_members_unique.rst
deleted file mode 100644
index 3378e8a0f6..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_look_members_unique.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_look\_members\_unique module
-================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_look_members_unique
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_look_no_default_shaders.rst b/docs/source/pype.plugins.maya.publish.validate_look_no_default_shaders.rst
deleted file mode 100644
index 662e2c7621..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_look_no_default_shaders.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_look\_no\_default\_shaders module
-=====================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_look_no_default_shaders
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_look_sets.rst b/docs/source/pype.plugins.maya.publish.validate_look_sets.rst
deleted file mode 100644
index 5427331568..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_look_sets.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_look\_sets module
-=====================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_look_sets
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_look_shading_group.rst b/docs/source/pype.plugins.maya.publish.validate_look_shading_group.rst
deleted file mode 100644
index 259f4952b7..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_look_shading_group.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_look\_shading\_group module
-===============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_look_shading_group
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_look_single_shader.rst b/docs/source/pype.plugins.maya.publish.validate_look_single_shader.rst
deleted file mode 100644
index fa43283416..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_look_single_shader.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_look\_single\_shader module
-===============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_look_single_shader
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_maya_units.rst b/docs/source/pype.plugins.maya.publish.validate_maya_units.rst
deleted file mode 100644
index 16af19f6d9..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_maya_units.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_maya\_units module
-======================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_maya_units
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_mesh_arnold_attributes.rst b/docs/source/pype.plugins.maya.publish.validate_mesh_arnold_attributes.rst
deleted file mode 100644
index ef18ad1457..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_mesh_arnold_attributes.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_mesh\_arnold\_attributes module
-===================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_mesh_arnold_attributes
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_mesh_has_uv.rst b/docs/source/pype.plugins.maya.publish.validate_mesh_has_uv.rst
deleted file mode 100644
index c6af7063c3..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_mesh_has_uv.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_mesh\_has\_uv module
-========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_mesh_has_uv
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_mesh_lamina_faces.rst b/docs/source/pype.plugins.maya.publish.validate_mesh_lamina_faces.rst
deleted file mode 100644
index 006488e77f..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_mesh_lamina_faces.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_mesh\_lamina\_faces module
-==============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_mesh_lamina_faces
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_mesh_no_negative_scale.rst b/docs/source/pype.plugins.maya.publish.validate_mesh_no_negative_scale.rst
deleted file mode 100644
index 8720f3d018..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_mesh_no_negative_scale.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_mesh\_no\_negative\_scale module
-====================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_mesh_no_negative_scale
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_mesh_non_manifold.rst b/docs/source/pype.plugins.maya.publish.validate_mesh_non_manifold.rst
deleted file mode 100644
index a69a4c6fc4..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_mesh_non_manifold.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_mesh\_non\_manifold module
-==============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_mesh_non_manifold
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_mesh_non_zero_edge.rst b/docs/source/pype.plugins.maya.publish.validate_mesh_non_zero_edge.rst
deleted file mode 100644
index 89ea60d1bc..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_mesh_non_zero_edge.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_mesh\_non\_zero\_edge module
-================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_mesh_non_zero_edge
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_mesh_normals_unlocked.rst b/docs/source/pype.plugins.maya.publish.validate_mesh_normals_unlocked.rst
deleted file mode 100644
index 7dfbd0717d..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_mesh_normals_unlocked.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_mesh\_normals\_unlocked module
-==================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_mesh_normals_unlocked
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_mesh_overlapping_uvs.rst b/docs/source/pype.plugins.maya.publish.validate_mesh_overlapping_uvs.rst
deleted file mode 100644
index f5df633124..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_mesh_overlapping_uvs.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_mesh\_overlapping\_uvs module
-=================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_mesh_overlapping_uvs
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_mesh_shader_connections.rst b/docs/source/pype.plugins.maya.publish.validate_mesh_shader_connections.rst
deleted file mode 100644
index b3cd77ab2a..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_mesh_shader_connections.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_mesh\_shader\_connections module
-====================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_mesh_shader_connections
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_mesh_single_uv_set.rst b/docs/source/pype.plugins.maya.publish.validate_mesh_single_uv_set.rst
deleted file mode 100644
index 29a1217437..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_mesh_single_uv_set.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_mesh\_single\_uv\_set module
-================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_mesh_single_uv_set
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_mesh_uv_set_map1.rst b/docs/source/pype.plugins.maya.publish.validate_mesh_uv_set_map1.rst
deleted file mode 100644
index 49d1b22497..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_mesh_uv_set_map1.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_mesh\_uv\_set\_map1 module
-==============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_mesh_uv_set_map1
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_mesh_vertices_have_edges.rst b/docs/source/pype.plugins.maya.publish.validate_mesh_vertices_have_edges.rst
deleted file mode 100644
index 99e3047e3d..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_mesh_vertices_have_edges.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_mesh\_vertices\_have\_edges module
-======================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_mesh_vertices_have_edges
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_model_content.rst b/docs/source/pype.plugins.maya.publish.validate_model_content.rst
deleted file mode 100644
index dc0a415718..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_model_content.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_model\_content module
-=========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_model_content
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_model_name.rst b/docs/source/pype.plugins.maya.publish.validate_model_name.rst
deleted file mode 100644
index ea78ceea70..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_model_name.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_model\_name module
-======================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_model_name
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_muster_connection.rst b/docs/source/pype.plugins.maya.publish.validate_muster_connection.rst
deleted file mode 100644
index 4a4a1e926b..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_muster_connection.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_muster\_connection module
-=============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_muster_connection
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_no_animation.rst b/docs/source/pype.plugins.maya.publish.validate_no_animation.rst
deleted file mode 100644
index b42021369d..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_no_animation.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_no\_animation module
-========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_no_animation
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_no_default_camera.rst b/docs/source/pype.plugins.maya.publish.validate_no_default_camera.rst
deleted file mode 100644
index 59544369f6..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_no_default_camera.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_no\_default\_camera module
-==============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_no_default_camera
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_no_namespace.rst b/docs/source/pype.plugins.maya.publish.validate_no_namespace.rst
deleted file mode 100644
index bdf4ceb324..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_no_namespace.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_no\_namespace module
-========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_no_namespace
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_no_null_transforms.rst b/docs/source/pype.plugins.maya.publish.validate_no_null_transforms.rst
deleted file mode 100644
index 12beed8c33..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_no_null_transforms.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_no\_null\_transforms module
-===============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_no_null_transforms
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_no_unknown_nodes.rst b/docs/source/pype.plugins.maya.publish.validate_no_unknown_nodes.rst
deleted file mode 100644
index 12c977dbb9..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_no_unknown_nodes.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_no\_unknown\_nodes module
-=============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_no_unknown_nodes
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_no_vraymesh.rst b/docs/source/pype.plugins.maya.publish.validate_no_vraymesh.rst
deleted file mode 100644
index a1a0b9ee64..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_no_vraymesh.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_no\_vraymesh module
-=======================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_no_vraymesh
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_node_ids.rst b/docs/source/pype.plugins.maya.publish.validate_node_ids.rst
deleted file mode 100644
index 7b1d79100f..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_node_ids.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_node\_ids module
-====================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_node_ids
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_node_ids_deformed_shapes.rst b/docs/source/pype.plugins.maya.publish.validate_node_ids_deformed_shapes.rst
deleted file mode 100644
index 90ef81c5b5..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_node_ids_deformed_shapes.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_node\_ids\_deformed\_shapes module
-======================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_node_ids_deformed_shapes
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_node_ids_in_database.rst b/docs/source/pype.plugins.maya.publish.validate_node_ids_in_database.rst
deleted file mode 100644
index 5eb0047d16..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_node_ids_in_database.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_node\_ids\_in\_database module
-==================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_node_ids_in_database
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_node_ids_related.rst b/docs/source/pype.plugins.maya.publish.validate_node_ids_related.rst
deleted file mode 100644
index 1f030462ae..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_node_ids_related.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_node\_ids\_related module
-=============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_node_ids_related
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_node_ids_unique.rst b/docs/source/pype.plugins.maya.publish.validate_node_ids_unique.rst
deleted file mode 100644
index 20ba3a3a6d..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_node_ids_unique.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_node\_ids\_unique module
-============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_node_ids_unique
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_node_no_ghosting.rst b/docs/source/pype.plugins.maya.publish.validate_node_no_ghosting.rst
deleted file mode 100644
index 8315888630..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_node_no_ghosting.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_node\_no\_ghosting module
-=============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_node_no_ghosting
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_render_image_rule.rst b/docs/source/pype.plugins.maya.publish.validate_render_image_rule.rst
deleted file mode 100644
index 88870a9ea8..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_render_image_rule.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_render\_image\_rule module
-==============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_render_image_rule
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_render_no_default_cameras.rst b/docs/source/pype.plugins.maya.publish.validate_render_no_default_cameras.rst
deleted file mode 100644
index b464dbeab6..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_render_no_default_cameras.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_render\_no\_default\_cameras module
-=======================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_render_no_default_cameras
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_render_single_camera.rst b/docs/source/pype.plugins.maya.publish.validate_render_single_camera.rst
deleted file mode 100644
index 60a0cbd6fb..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_render_single_camera.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_render\_single\_camera module
-=================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_render_single_camera
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_renderlayer_aovs.rst b/docs/source/pype.plugins.maya.publish.validate_renderlayer_aovs.rst
deleted file mode 100644
index 65d5181065..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_renderlayer_aovs.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_renderlayer\_aovs module
-============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_renderlayer_aovs
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_rendersettings.rst b/docs/source/pype.plugins.maya.publish.validate_rendersettings.rst
deleted file mode 100644
index fce7dba5b8..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_rendersettings.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_rendersettings module
-=========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_rendersettings
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_resources.rst b/docs/source/pype.plugins.maya.publish.validate_resources.rst
deleted file mode 100644
index 0a866acdbb..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_resources.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_resources module
-====================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_resources
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_rig_contents.rst b/docs/source/pype.plugins.maya.publish.validate_rig_contents.rst
deleted file mode 100644
index dbd7d84bed..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_rig_contents.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_rig\_contents module
-========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_rig_contents
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_rig_controllers.rst b/docs/source/pype.plugins.maya.publish.validate_rig_controllers.rst
deleted file mode 100644
index 3bf075e8ad..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_rig_controllers.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_rig\_controllers module
-===========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_rig_controllers
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_rig_controllers_arnold_attributes.rst b/docs/source/pype.plugins.maya.publish.validate_rig_controllers_arnold_attributes.rst
deleted file mode 100644
index 67e9256f3a..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_rig_controllers_arnold_attributes.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_rig\_controllers\_arnold\_attributes module
-===============================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_rig_controllers_arnold_attributes
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_rig_out_set_node_ids.rst b/docs/source/pype.plugins.maya.publish.validate_rig_out_set_node_ids.rst
deleted file mode 100644
index e4f1cfc428..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_rig_out_set_node_ids.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_rig\_out\_set\_node\_ids module
-===================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_rig_out_set_node_ids
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_rig_output_ids.rst b/docs/source/pype.plugins.maya.publish.validate_rig_output_ids.rst
deleted file mode 100644
index e1d3b1a659..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_rig_output_ids.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_rig\_output\_ids module
-===========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_rig_output_ids
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_scene_set_workspace.rst b/docs/source/pype.plugins.maya.publish.validate_scene_set_workspace.rst
deleted file mode 100644
index daf2f152d9..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_scene_set_workspace.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_scene\_set\_workspace module
-================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_scene_set_workspace
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_shader_name.rst b/docs/source/pype.plugins.maya.publish.validate_shader_name.rst
deleted file mode 100644
index ae5b196a1d..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_shader_name.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_shader\_name module
-=======================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_shader_name
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_shape_default_names.rst b/docs/source/pype.plugins.maya.publish.validate_shape_default_names.rst
deleted file mode 100644
index 49effc932d..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_shape_default_names.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_shape\_default\_names module
-================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_shape_default_names
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_shape_render_stats.rst b/docs/source/pype.plugins.maya.publish.validate_shape_render_stats.rst
deleted file mode 100644
index 359af50a0f..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_shape_render_stats.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_shape\_render\_stats module
-===============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_shape_render_stats
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_single_assembly.rst b/docs/source/pype.plugins.maya.publish.validate_single_assembly.rst
deleted file mode 100644
index 090f57b3ff..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_single_assembly.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_single\_assembly module
-===========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_single_assembly
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_skinCluster_deformer_set.rst b/docs/source/pype.plugins.maya.publish.validate_skinCluster_deformer_set.rst
deleted file mode 100644
index 607a610097..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_skinCluster_deformer_set.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_skinCluster\_deformer\_set module
-=====================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_skinCluster_deformer_set
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_step_size.rst b/docs/source/pype.plugins.maya.publish.validate_step_size.rst
deleted file mode 100644
index bb883ea7b5..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_step_size.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_step\_size module
-=====================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_step_size
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_transform_naming_suffix.rst b/docs/source/pype.plugins.maya.publish.validate_transform_naming_suffix.rst
deleted file mode 100644
index 4d7edda78d..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_transform_naming_suffix.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_transform\_naming\_suffix module
-====================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_transform_naming_suffix
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_transform_zero.rst b/docs/source/pype.plugins.maya.publish.validate_transform_zero.rst
deleted file mode 100644
index 6d5cacfe00..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_transform_zero.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_transform\_zero module
-==========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_transform_zero
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_unicode_strings.rst b/docs/source/pype.plugins.maya.publish.validate_unicode_strings.rst
deleted file mode 100644
index 9cc17d6810..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_unicode_strings.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_unicode\_strings module
-===========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_unicode_strings
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_unreal_mesh_triangulated.rst b/docs/source/pype.plugins.maya.publish.validate_unreal_mesh_triangulated.rst
deleted file mode 100644
index 4dcb518194..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_unreal_mesh_triangulated.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_unreal\_mesh\_triangulated module
-=====================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_unreal_mesh_triangulated
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_unreal_staticmesh_naming.rst b/docs/source/pype.plugins.maya.publish.validate_unreal_staticmesh_naming.rst
deleted file mode 100644
index f7225ab395..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_unreal_staticmesh_naming.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_unreal\_staticmesh\_naming module
-=====================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_unreal_staticmesh_naming
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_unreal_up_axis.rst b/docs/source/pype.plugins.maya.publish.validate_unreal_up_axis.rst
deleted file mode 100644
index ff688c493f..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_unreal_up_axis.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_unreal\_up\_axis module
-===========================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_unreal_up_axis
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_vray_distributed_rendering.rst b/docs/source/pype.plugins.maya.publish.validate_vray_distributed_rendering.rst
deleted file mode 100644
index f5d05e6d76..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_vray_distributed_rendering.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_vray\_distributed\_rendering module
-=======================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_vray_distributed_rendering
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_vray_referenced_aovs.rst b/docs/source/pype.plugins.maya.publish.validate_vray_referenced_aovs.rst
deleted file mode 100644
index 16ad9666aa..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_vray_referenced_aovs.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_vray\_referenced\_aovs module
-=================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_vray_referenced_aovs
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_vray_translator_settings.rst b/docs/source/pype.plugins.maya.publish.validate_vray_translator_settings.rst
deleted file mode 100644
index a06a9531dd..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_vray_translator_settings.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_vray\_translator\_settings module
-=====================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_vray_translator_settings
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_vrayproxy.rst b/docs/source/pype.plugins.maya.publish.validate_vrayproxy.rst
deleted file mode 100644
index 081f58924a..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_vrayproxy.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_vrayproxy module
-====================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_vrayproxy
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_vrayproxy_members.rst b/docs/source/pype.plugins.maya.publish.validate_vrayproxy_members.rst
deleted file mode 100644
index 7c587f39b0..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_vrayproxy_members.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_vrayproxy\_members module
-=============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_vrayproxy_members
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_yeti_renderscript_callbacks.rst b/docs/source/pype.plugins.maya.publish.validate_yeti_renderscript_callbacks.rst
deleted file mode 100644
index 889d469b2f..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_yeti_renderscript_callbacks.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_yeti\_renderscript\_callbacks module
-========================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_yeti_renderscript_callbacks
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_yeti_rig_cache_state.rst b/docs/source/pype.plugins.maya.publish.validate_yeti_rig_cache_state.rst
deleted file mode 100644
index 4138b1e8a4..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_yeti_rig_cache_state.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_yeti\_rig\_cache\_state module
-==================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_yeti_rig_cache_state
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_yeti_rig_input_in_instance.rst b/docs/source/pype.plugins.maya.publish.validate_yeti_rig_input_in_instance.rst
deleted file mode 100644
index 37b862926c..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_yeti_rig_input_in_instance.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_yeti\_rig\_input\_in\_instance module
-=========================================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_yeti_rig_input_in_instance
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.publish.validate_yeti_rig_settings.rst b/docs/source/pype.plugins.maya.publish.validate_yeti_rig_settings.rst
deleted file mode 100644
index 9fd54193dc..0000000000
--- a/docs/source/pype.plugins.maya.publish.validate_yeti_rig_settings.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.plugins.maya.publish.validate\_yeti\_rig\_settings module
-==============================================================
-
-.. automodule:: pype.plugins.maya.publish.validate_yeti_rig_settings
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.plugins.maya.rst b/docs/source/pype.plugins.maya.rst
deleted file mode 100644
index 129cf5fce9..0000000000
--- a/docs/source/pype.plugins.maya.rst
+++ /dev/null
@@ -1,15 +0,0 @@
-pype.plugins.maya package
-=========================
-
-.. automodule:: pype.plugins.maya
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 10
-
- pype.plugins.maya.publish
diff --git a/docs/source/pype.plugins.rst b/docs/source/pype.plugins.rst
deleted file mode 100644
index 8e5e45ba5d..0000000000
--- a/docs/source/pype.plugins.rst
+++ /dev/null
@@ -1,15 +0,0 @@
-pype.plugins package
-====================
-
-.. automodule:: pype.plugins
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 10
-
- pype.plugins.maya
diff --git a/docs/source/pype.pype_commands.rst b/docs/source/pype.pype_commands.rst
deleted file mode 100644
index b8a416df7b..0000000000
--- a/docs/source/pype.pype_commands.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.pype\_commands module
-==========================
-
-.. automodule:: pype.pype_commands
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.resources.rst b/docs/source/pype.resources.rst
deleted file mode 100644
index 2fb5b92dce..0000000000
--- a/docs/source/pype.resources.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.resources package
-======================
-
-.. automodule:: pype.resources
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.rst b/docs/source/pype.rst
deleted file mode 100644
index 3589d2f3fe..0000000000
--- a/docs/source/pype.rst
+++ /dev/null
@@ -1,99 +0,0 @@
-pype package
-============
-
-.. automodule:: pype
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.hosts
- pype.lib
- pype.modules
- pype.resources
- pype.scripts
- pype.settings
- pype.tests
- pype.tools
- pype.vendor
- pype.widgets
-
-Submodules
-----------
-
-pype.action module
-------------------
-
-.. automodule:: pype.action
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.api module
----------------
-
-.. automodule:: pype.api
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.cli module
----------------
-
-.. automodule:: pype.cli
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.launcher\_actions module
------------------------------
-
-.. automodule:: pype.launcher_actions
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.modules\_manager module
-----------------------------
-
-.. automodule:: pype.modules_manager
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.plugin module
-------------------
-
-.. automodule:: pype.plugin
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.pype\_commands module
---------------------------
-
-.. automodule:: pype.pype_commands
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.setdress\_api module
--------------------------
-
-.. automodule:: pype.setdress_api
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.version module
--------------------
-
-.. automodule:: pype.version
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.export_maya_ass_job.rst b/docs/source/pype.scripts.export_maya_ass_job.rst
deleted file mode 100644
index c35cc49ddd..0000000000
--- a/docs/source/pype.scripts.export_maya_ass_job.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.scripts.export\_maya\_ass\_job module
-==========================================
-
-.. automodule:: pype.scripts.export_maya_ass_job
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.fusion_switch_shot.rst b/docs/source/pype.scripts.fusion_switch_shot.rst
deleted file mode 100644
index 39d3473d16..0000000000
--- a/docs/source/pype.scripts.fusion_switch_shot.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.scripts.fusion\_switch\_shot module
-========================================
-
-.. automodule:: pype.scripts.fusion_switch_shot
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.otio_burnin.rst b/docs/source/pype.scripts.otio_burnin.rst
deleted file mode 100644
index e6a93017f5..0000000000
--- a/docs/source/pype.scripts.otio_burnin.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.scripts.otio\_burnin module
-================================
-
-.. automodule:: pype.scripts.otio_burnin
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.publish_deadline.rst b/docs/source/pype.scripts.publish_deadline.rst
deleted file mode 100644
index d134e17244..0000000000
--- a/docs/source/pype.scripts.publish_deadline.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.scripts.publish\_deadline module
-=====================================
-
-.. automodule:: pype.scripts.publish_deadline
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.publish_filesequence.rst b/docs/source/pype.scripts.publish_filesequence.rst
deleted file mode 100644
index 440d52caad..0000000000
--- a/docs/source/pype.scripts.publish_filesequence.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.scripts.publish\_filesequence module
-=========================================
-
-.. automodule:: pype.scripts.publish_filesequence
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.rst b/docs/source/pype.scripts.rst
deleted file mode 100644
index 5985771b97..0000000000
--- a/docs/source/pype.scripts.rst
+++ /dev/null
@@ -1,58 +0,0 @@
-pype.scripts package
-====================
-
-.. automodule:: pype.scripts
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.scripts.slates
-
-Submodules
-----------
-
-pype.scripts.export\_maya\_ass\_job module
-------------------------------------------
-
-.. automodule:: pype.scripts.export_maya_ass_job
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.scripts.fusion\_switch\_shot module
-----------------------------------------
-
-.. automodule:: pype.scripts.fusion_switch_shot
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.scripts.otio\_burnin module
---------------------------------
-
-.. automodule:: pype.scripts.otio_burnin
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.scripts.publish\_deadline module
--------------------------------------
-
-.. automodule:: pype.scripts.publish_deadline
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.scripts.publish\_filesequence module
------------------------------------------
-
-.. automodule:: pype.scripts.publish_filesequence
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.slates.rst b/docs/source/pype.scripts.slates.rst
deleted file mode 100644
index 74b4cb4343..0000000000
--- a/docs/source/pype.scripts.slates.rst
+++ /dev/null
@@ -1,15 +0,0 @@
-pype.scripts.slates package
-===========================
-
-.. automodule:: pype.scripts.slates
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.scripts.slates.slate_base
diff --git a/docs/source/pype.scripts.slates.slate_base.api.rst b/docs/source/pype.scripts.slates.slate_base.api.rst
deleted file mode 100644
index 0016a5c42a..0000000000
--- a/docs/source/pype.scripts.slates.slate_base.api.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.scripts.slates.slate\_base.api module
-==========================================
-
-.. automodule:: pype.scripts.slates.slate_base.api
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.slates.slate_base.base.rst b/docs/source/pype.scripts.slates.slate_base.base.rst
deleted file mode 100644
index 5e34d654b0..0000000000
--- a/docs/source/pype.scripts.slates.slate_base.base.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.scripts.slates.slate\_base.base module
-===========================================
-
-.. automodule:: pype.scripts.slates.slate_base.base
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.slates.slate_base.example.rst b/docs/source/pype.scripts.slates.slate_base.example.rst
deleted file mode 100644
index 95ebcc835a..0000000000
--- a/docs/source/pype.scripts.slates.slate_base.example.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.scripts.slates.slate\_base.example module
-==============================================
-
-.. automodule:: pype.scripts.slates.slate_base.example
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.slates.slate_base.font_factory.rst b/docs/source/pype.scripts.slates.slate_base.font_factory.rst
deleted file mode 100644
index c53efef554..0000000000
--- a/docs/source/pype.scripts.slates.slate_base.font_factory.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.scripts.slates.slate\_base.font\_factory module
-====================================================
-
-.. automodule:: pype.scripts.slates.slate_base.font_factory
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.slates.slate_base.items.rst b/docs/source/pype.scripts.slates.slate_base.items.rst
deleted file mode 100644
index 25abb11bb9..0000000000
--- a/docs/source/pype.scripts.slates.slate_base.items.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.scripts.slates.slate\_base.items module
-============================================
-
-.. automodule:: pype.scripts.slates.slate_base.items
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.slates.slate_base.layer.rst b/docs/source/pype.scripts.slates.slate_base.layer.rst
deleted file mode 100644
index 8681e3accf..0000000000
--- a/docs/source/pype.scripts.slates.slate_base.layer.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.scripts.slates.slate\_base.layer module
-============================================
-
-.. automodule:: pype.scripts.slates.slate_base.layer
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.slates.slate_base.lib.rst b/docs/source/pype.scripts.slates.slate_base.lib.rst
deleted file mode 100644
index c4ef2c912e..0000000000
--- a/docs/source/pype.scripts.slates.slate_base.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.scripts.slates.slate\_base.lib module
-==========================================
-
-.. automodule:: pype.scripts.slates.slate_base.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.slates.slate_base.main_frame.rst b/docs/source/pype.scripts.slates.slate_base.main_frame.rst
deleted file mode 100644
index 5093c28a74..0000000000
--- a/docs/source/pype.scripts.slates.slate_base.main_frame.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.scripts.slates.slate\_base.main\_frame module
-==================================================
-
-.. automodule:: pype.scripts.slates.slate_base.main_frame
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.scripts.slates.slate_base.rst b/docs/source/pype.scripts.slates.slate_base.rst
deleted file mode 100644
index 00726c04bf..0000000000
--- a/docs/source/pype.scripts.slates.slate_base.rst
+++ /dev/null
@@ -1,74 +0,0 @@
-pype.scripts.slates.slate\_base package
-=======================================
-
-.. automodule:: pype.scripts.slates.slate_base
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.scripts.slates.slate\_base.api module
-------------------------------------------
-
-.. automodule:: pype.scripts.slates.slate_base.api
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.scripts.slates.slate\_base.base module
--------------------------------------------
-
-.. automodule:: pype.scripts.slates.slate_base.base
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.scripts.slates.slate\_base.example module
-----------------------------------------------
-
-.. automodule:: pype.scripts.slates.slate_base.example
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.scripts.slates.slate\_base.font\_factory module
-----------------------------------------------------
-
-.. automodule:: pype.scripts.slates.slate_base.font_factory
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.scripts.slates.slate\_base.items module
---------------------------------------------
-
-.. automodule:: pype.scripts.slates.slate_base.items
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.scripts.slates.slate\_base.layer module
---------------------------------------------
-
-.. automodule:: pype.scripts.slates.slate_base.layer
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.scripts.slates.slate\_base.lib module
-------------------------------------------
-
-.. automodule:: pype.scripts.slates.slate_base.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.scripts.slates.slate\_base.main\_frame module
---------------------------------------------------
-
-.. automodule:: pype.scripts.slates.slate_base.main_frame
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.setdress_api.rst b/docs/source/pype.setdress_api.rst
deleted file mode 100644
index 95638ea64d..0000000000
--- a/docs/source/pype.setdress_api.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.setdress\_api module
-=========================
-
-.. automodule:: pype.setdress_api
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.settings.constants.rst b/docs/source/pype.settings.constants.rst
deleted file mode 100644
index ac652089c8..0000000000
--- a/docs/source/pype.settings.constants.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.settings.constants module
-==============================
-
-.. automodule:: pype.settings.constants
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.settings.handlers.rst b/docs/source/pype.settings.handlers.rst
deleted file mode 100644
index 60ea0ae952..0000000000
--- a/docs/source/pype.settings.handlers.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.settings.handlers module
-=============================
-
-.. automodule:: pype.settings.handlers
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.settings.lib.rst b/docs/source/pype.settings.lib.rst
deleted file mode 100644
index d6e3e8bd06..0000000000
--- a/docs/source/pype.settings.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.settings.lib module
-========================
-
-.. automodule:: pype.settings.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.settings.rst b/docs/source/pype.settings.rst
deleted file mode 100644
index 5bf131d555..0000000000
--- a/docs/source/pype.settings.rst
+++ /dev/null
@@ -1,18 +0,0 @@
-pype.settings package
-=====================
-
-.. automodule:: pype.settings
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.settings.lib module
-------------------------
-
-.. automodule:: pype.settings.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tests.lib.rst b/docs/source/pype.tests.lib.rst
deleted file mode 100644
index 375ebd0258..0000000000
--- a/docs/source/pype.tests.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tests.lib module
-=====================
-
-.. automodule:: pype.tests.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tests.rst b/docs/source/pype.tests.rst
deleted file mode 100644
index 3f34cdcd77..0000000000
--- a/docs/source/pype.tests.rst
+++ /dev/null
@@ -1,42 +0,0 @@
-pype.tests package
-==================
-
-.. automodule:: pype.tests
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.tests.lib module
----------------------
-
-.. automodule:: pype.tests.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tests.test\_avalon\_plugin\_presets module
------------------------------------------------
-
-.. automodule:: pype.tests.test_avalon_plugin_presets
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tests.test\_mongo\_performance module
-------------------------------------------
-
-.. automodule:: pype.tests.test_mongo_performance
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tests.test\_pyblish\_filter module
----------------------------------------
-
-.. automodule:: pype.tests.test_pyblish_filter
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tests.test_avalon_plugin_presets.rst b/docs/source/pype.tests.test_avalon_plugin_presets.rst
deleted file mode 100644
index b4ff802256..0000000000
--- a/docs/source/pype.tests.test_avalon_plugin_presets.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tests.test\_avalon\_plugin\_presets module
-===============================================
-
-.. automodule:: pype.tests.test_avalon_plugin_presets
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tests.test_lib_restructuralization.rst b/docs/source/pype.tests.test_lib_restructuralization.rst
deleted file mode 100644
index 8d426fcb6b..0000000000
--- a/docs/source/pype.tests.test_lib_restructuralization.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tests.test\_lib\_restructuralization module
-================================================
-
-.. automodule:: pype.tests.test_lib_restructuralization
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tests.test_mongo_performance.rst b/docs/source/pype.tests.test_mongo_performance.rst
deleted file mode 100644
index 4686247e59..0000000000
--- a/docs/source/pype.tests.test_mongo_performance.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tests.test\_mongo\_performance module
-==========================================
-
-.. automodule:: pype.tests.test_mongo_performance
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tests.test_pyblish_filter.rst b/docs/source/pype.tests.test_pyblish_filter.rst
deleted file mode 100644
index 196ec02433..0000000000
--- a/docs/source/pype.tests.test_pyblish_filter.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tests.test\_pyblish\_filter module
-=======================================
-
-.. automodule:: pype.tests.test_pyblish_filter
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.assetcreator.app.rst b/docs/source/pype.tools.assetcreator.app.rst
deleted file mode 100644
index b46281b07a..0000000000
--- a/docs/source/pype.tools.assetcreator.app.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.assetcreator.app module
-==================================
-
-.. automodule:: pype.tools.assetcreator.app
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.assetcreator.model.rst b/docs/source/pype.tools.assetcreator.model.rst
deleted file mode 100644
index 752791d07c..0000000000
--- a/docs/source/pype.tools.assetcreator.model.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.assetcreator.model module
-====================================
-
-.. automodule:: pype.tools.assetcreator.model
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.assetcreator.rst b/docs/source/pype.tools.assetcreator.rst
deleted file mode 100644
index b95c3b3c60..0000000000
--- a/docs/source/pype.tools.assetcreator.rst
+++ /dev/null
@@ -1,34 +0,0 @@
-pype.tools.assetcreator package
-===============================
-
-.. automodule:: pype.tools.assetcreator
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.tools.assetcreator.app module
-----------------------------------
-
-.. automodule:: pype.tools.assetcreator.app
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.assetcreator.model module
-------------------------------------
-
-.. automodule:: pype.tools.assetcreator.model
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.assetcreator.widget module
--------------------------------------
-
-.. automodule:: pype.tools.assetcreator.widget
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.assetcreator.widget.rst b/docs/source/pype.tools.assetcreator.widget.rst
deleted file mode 100644
index 23ed335306..0000000000
--- a/docs/source/pype.tools.assetcreator.widget.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.assetcreator.widget module
-=====================================
-
-.. automodule:: pype.tools.assetcreator.widget
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.launcher.actions.rst b/docs/source/pype.tools.launcher.actions.rst
deleted file mode 100644
index e2ec217d4b..0000000000
--- a/docs/source/pype.tools.launcher.actions.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.launcher.actions module
-==================================
-
-.. automodule:: pype.tools.launcher.actions
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.launcher.delegates.rst b/docs/source/pype.tools.launcher.delegates.rst
deleted file mode 100644
index e8a7519cd5..0000000000
--- a/docs/source/pype.tools.launcher.delegates.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.launcher.delegates module
-====================================
-
-.. automodule:: pype.tools.launcher.delegates
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.launcher.flickcharm.rst b/docs/source/pype.tools.launcher.flickcharm.rst
deleted file mode 100644
index 5105d3235e..0000000000
--- a/docs/source/pype.tools.launcher.flickcharm.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.launcher.flickcharm module
-=====================================
-
-.. automodule:: pype.tools.launcher.flickcharm
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.launcher.lib.rst b/docs/source/pype.tools.launcher.lib.rst
deleted file mode 100644
index 28db8a6540..0000000000
--- a/docs/source/pype.tools.launcher.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.launcher.lib module
-==============================
-
-.. automodule:: pype.tools.launcher.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.launcher.models.rst b/docs/source/pype.tools.launcher.models.rst
deleted file mode 100644
index 701826284e..0000000000
--- a/docs/source/pype.tools.launcher.models.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.launcher.models module
-=================================
-
-.. automodule:: pype.tools.launcher.models
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.launcher.rst b/docs/source/pype.tools.launcher.rst
deleted file mode 100644
index c4782bf9bb..0000000000
--- a/docs/source/pype.tools.launcher.rst
+++ /dev/null
@@ -1,66 +0,0 @@
-pype.tools.launcher package
-===========================
-
-.. automodule:: pype.tools.launcher
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.tools.launcher.actions module
-----------------------------------
-
-.. automodule:: pype.tools.launcher.actions
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.launcher.delegates module
-------------------------------------
-
-.. automodule:: pype.tools.launcher.delegates
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.launcher.flickcharm module
--------------------------------------
-
-.. automodule:: pype.tools.launcher.flickcharm
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.launcher.lib module
-------------------------------
-
-.. automodule:: pype.tools.launcher.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.launcher.models module
----------------------------------
-
-.. automodule:: pype.tools.launcher.models
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.launcher.widgets module
-----------------------------------
-
-.. automodule:: pype.tools.launcher.widgets
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.launcher.window module
----------------------------------
-
-.. automodule:: pype.tools.launcher.window
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.launcher.widgets.rst b/docs/source/pype.tools.launcher.widgets.rst
deleted file mode 100644
index 400a5b7a2c..0000000000
--- a/docs/source/pype.tools.launcher.widgets.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.launcher.widgets module
-==================================
-
-.. automodule:: pype.tools.launcher.widgets
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.launcher.window.rst b/docs/source/pype.tools.launcher.window.rst
deleted file mode 100644
index ae92207795..0000000000
--- a/docs/source/pype.tools.launcher.window.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.launcher.window module
-=================================
-
-.. automodule:: pype.tools.launcher.window
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.app.rst b/docs/source/pype.tools.pyblish_pype.app.rst
deleted file mode 100644
index a70aada725..0000000000
--- a/docs/source/pype.tools.pyblish_pype.app.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.app module
-===================================
-
-.. automodule:: pype.tools.pyblish_pype.app
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.awesome.rst b/docs/source/pype.tools.pyblish_pype.awesome.rst
deleted file mode 100644
index 50a81ac5e8..0000000000
--- a/docs/source/pype.tools.pyblish_pype.awesome.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.awesome module
-=======================================
-
-.. automodule:: pype.tools.pyblish_pype.awesome
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.compat.rst b/docs/source/pype.tools.pyblish_pype.compat.rst
deleted file mode 100644
index 4beee41e00..0000000000
--- a/docs/source/pype.tools.pyblish_pype.compat.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.compat module
-======================================
-
-.. automodule:: pype.tools.pyblish_pype.compat
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.constants.rst b/docs/source/pype.tools.pyblish_pype.constants.rst
deleted file mode 100644
index bab67a2270..0000000000
--- a/docs/source/pype.tools.pyblish_pype.constants.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.constants module
-=========================================
-
-.. automodule:: pype.tools.pyblish_pype.constants
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.control.rst b/docs/source/pype.tools.pyblish_pype.control.rst
deleted file mode 100644
index c2f8c0031e..0000000000
--- a/docs/source/pype.tools.pyblish_pype.control.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.control module
-=======================================
-
-.. automodule:: pype.tools.pyblish_pype.control
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.delegate.rst b/docs/source/pype.tools.pyblish_pype.delegate.rst
deleted file mode 100644
index 8796c9830f..0000000000
--- a/docs/source/pype.tools.pyblish_pype.delegate.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.delegate module
-========================================
-
-.. automodule:: pype.tools.pyblish_pype.delegate
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.mock.rst b/docs/source/pype.tools.pyblish_pype.mock.rst
deleted file mode 100644
index 8c22e80856..0000000000
--- a/docs/source/pype.tools.pyblish_pype.mock.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.mock module
-====================================
-
-.. automodule:: pype.tools.pyblish_pype.mock
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.model.rst b/docs/source/pype.tools.pyblish_pype.model.rst
deleted file mode 100644
index 983b06cc8a..0000000000
--- a/docs/source/pype.tools.pyblish_pype.model.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.model module
-=====================================
-
-.. automodule:: pype.tools.pyblish_pype.model
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.rst b/docs/source/pype.tools.pyblish_pype.rst
deleted file mode 100644
index 9479b5399f..0000000000
--- a/docs/source/pype.tools.pyblish_pype.rst
+++ /dev/null
@@ -1,130 +0,0 @@
-pype.tools.pyblish\_pype package
-================================
-
-.. automodule:: pype.tools.pyblish_pype
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.tools.pyblish_pype.vendor
-
-Submodules
-----------
-
-pype.tools.pyblish\_pype.app module
------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.app
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.pyblish\_pype.awesome module
----------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.awesome
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.pyblish\_pype.compat module
---------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.compat
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.pyblish\_pype.constants module
------------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.constants
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.pyblish\_pype.control module
----------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.control
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.pyblish\_pype.delegate module
-----------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.delegate
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.pyblish\_pype.mock module
-------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.mock
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.pyblish\_pype.model module
--------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.model
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.pyblish\_pype.settings module
-----------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.settings
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.pyblish\_pype.util module
-------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.util
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.pyblish\_pype.version module
----------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.version
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.pyblish\_pype.view module
-------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.view
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.pyblish\_pype.widgets module
----------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.widgets
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.pyblish\_pype.window module
---------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.window
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.settings.rst b/docs/source/pype.tools.pyblish_pype.settings.rst
deleted file mode 100644
index 2e4e95cca0..0000000000
--- a/docs/source/pype.tools.pyblish_pype.settings.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.settings module
-========================================
-
-.. automodule:: pype.tools.pyblish_pype.settings
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.util.rst b/docs/source/pype.tools.pyblish_pype.util.rst
deleted file mode 100644
index fa34295f12..0000000000
--- a/docs/source/pype.tools.pyblish_pype.util.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.util module
-====================================
-
-.. automodule:: pype.tools.pyblish_pype.util
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.vendor.qtawesome.animation.rst b/docs/source/pype.tools.pyblish_pype.vendor.qtawesome.animation.rst
deleted file mode 100644
index a892128308..0000000000
--- a/docs/source/pype.tools.pyblish_pype.vendor.qtawesome.animation.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.vendor.qtawesome.animation module
-==========================================================
-
-.. automodule:: pype.tools.pyblish_pype.vendor.qtawesome.animation
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.vendor.qtawesome.iconic_font.rst b/docs/source/pype.tools.pyblish_pype.vendor.qtawesome.iconic_font.rst
deleted file mode 100644
index 4f4337348f..0000000000
--- a/docs/source/pype.tools.pyblish_pype.vendor.qtawesome.iconic_font.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.vendor.qtawesome.iconic\_font module
-=============================================================
-
-.. automodule:: pype.tools.pyblish_pype.vendor.qtawesome.iconic_font
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.vendor.qtawesome.rst b/docs/source/pype.tools.pyblish_pype.vendor.qtawesome.rst
deleted file mode 100644
index 68b2ec4659..0000000000
--- a/docs/source/pype.tools.pyblish_pype.vendor.qtawesome.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.tools.pyblish\_pype.vendor.qtawesome package
-=================================================
-
-.. automodule:: pype.tools.pyblish_pype.vendor.qtawesome
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.tools.pyblish\_pype.vendor.qtawesome.animation module
-----------------------------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.vendor.qtawesome.animation
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.pyblish\_pype.vendor.qtawesome.iconic\_font module
--------------------------------------------------------------
-
-.. automodule:: pype.tools.pyblish_pype.vendor.qtawesome.iconic_font
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.vendor.rst b/docs/source/pype.tools.pyblish_pype.vendor.rst
deleted file mode 100644
index 69e6096053..0000000000
--- a/docs/source/pype.tools.pyblish_pype.vendor.rst
+++ /dev/null
@@ -1,15 +0,0 @@
-pype.tools.pyblish\_pype.vendor package
-=======================================
-
-.. automodule:: pype.tools.pyblish_pype.vendor
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.tools.pyblish_pype.vendor.qtawesome
diff --git a/docs/source/pype.tools.pyblish_pype.version.rst b/docs/source/pype.tools.pyblish_pype.version.rst
deleted file mode 100644
index a6ddcd5ce8..0000000000
--- a/docs/source/pype.tools.pyblish_pype.version.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.version module
-=======================================
-
-.. automodule:: pype.tools.pyblish_pype.version
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.view.rst b/docs/source/pype.tools.pyblish_pype.view.rst
deleted file mode 100644
index 21d34d9daa..0000000000
--- a/docs/source/pype.tools.pyblish_pype.view.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.view module
-====================================
-
-.. automodule:: pype.tools.pyblish_pype.view
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.widgets.rst b/docs/source/pype.tools.pyblish_pype.widgets.rst
deleted file mode 100644
index 8a0d3c380a..0000000000
--- a/docs/source/pype.tools.pyblish_pype.widgets.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.widgets module
-=======================================
-
-.. automodule:: pype.tools.pyblish_pype.widgets
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.pyblish_pype.window.rst b/docs/source/pype.tools.pyblish_pype.window.rst
deleted file mode 100644
index 10f7b1a36e..0000000000
--- a/docs/source/pype.tools.pyblish_pype.window.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.pyblish\_pype.window module
-======================================
-
-.. automodule:: pype.tools.pyblish_pype.window
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.rst b/docs/source/pype.tools.rst
deleted file mode 100644
index d82ed3384a..0000000000
--- a/docs/source/pype.tools.rst
+++ /dev/null
@@ -1,19 +0,0 @@
-pype.tools package
-==================
-
-.. automodule:: pype.tools
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.tools.assetcreator
- pype.tools.launcher
- pype.tools.pyblish_pype
- pype.tools.settings
- pype.tools.standalonepublish
diff --git a/docs/source/pype.tools.settings.rst b/docs/source/pype.tools.settings.rst
deleted file mode 100644
index ef54851ab1..0000000000
--- a/docs/source/pype.tools.settings.rst
+++ /dev/null
@@ -1,15 +0,0 @@
-pype.tools.settings package
-===========================
-
-.. automodule:: pype.tools.settings
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.tools.settings.settings
diff --git a/docs/source/pype.tools.settings.settings.rst b/docs/source/pype.tools.settings.settings.rst
deleted file mode 100644
index 793914e1a8..0000000000
--- a/docs/source/pype.tools.settings.settings.rst
+++ /dev/null
@@ -1,16 +0,0 @@
-pype.tools.settings.settings package
-====================================
-
-.. automodule:: pype.tools.settings.settings
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.tools.settings.settings.style
- pype.tools.settings.settings.widgets
diff --git a/docs/source/pype.tools.settings.settings.style.rst b/docs/source/pype.tools.settings.settings.style.rst
deleted file mode 100644
index 228322245c..0000000000
--- a/docs/source/pype.tools.settings.settings.style.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.settings.settings.style package
-==========================================
-
-.. automodule:: pype.tools.settings.settings.style
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.settings.settings.widgets.anatomy_types.rst b/docs/source/pype.tools.settings.settings.widgets.anatomy_types.rst
deleted file mode 100644
index ca951c82f0..0000000000
--- a/docs/source/pype.tools.settings.settings.widgets.anatomy_types.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.settings.settings.widgets.anatomy\_types module
-==========================================================
-
-.. automodule:: pype.tools.settings.settings.widgets.anatomy_types
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.settings.settings.widgets.base.rst b/docs/source/pype.tools.settings.settings.widgets.base.rst
deleted file mode 100644
index 8964d6f628..0000000000
--- a/docs/source/pype.tools.settings.settings.widgets.base.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.settings.settings.widgets.base module
-================================================
-
-.. automodule:: pype.tools.settings.settings.widgets.base
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.settings.settings.widgets.item_types.rst b/docs/source/pype.tools.settings.settings.widgets.item_types.rst
deleted file mode 100644
index 5e505538a7..0000000000
--- a/docs/source/pype.tools.settings.settings.widgets.item_types.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.settings.settings.widgets.item\_types module
-=======================================================
-
-.. automodule:: pype.tools.settings.settings.widgets.item_types
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.settings.settings.widgets.lib.rst b/docs/source/pype.tools.settings.settings.widgets.lib.rst
deleted file mode 100644
index ae100c74b2..0000000000
--- a/docs/source/pype.tools.settings.settings.widgets.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.settings.settings.widgets.lib module
-===============================================
-
-.. automodule:: pype.tools.settings.settings.widgets.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.settings.settings.widgets.multiselection_combobox.rst b/docs/source/pype.tools.settings.settings.widgets.multiselection_combobox.rst
deleted file mode 100644
index 004f2ae21f..0000000000
--- a/docs/source/pype.tools.settings.settings.widgets.multiselection_combobox.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.settings.settings.widgets.multiselection\_combobox module
-====================================================================
-
-.. automodule:: pype.tools.settings.settings.widgets.multiselection_combobox
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.settings.settings.widgets.rst b/docs/source/pype.tools.settings.settings.widgets.rst
deleted file mode 100644
index 8734280a08..0000000000
--- a/docs/source/pype.tools.settings.settings.widgets.rst
+++ /dev/null
@@ -1,74 +0,0 @@
-pype.tools.settings.settings.widgets package
-============================================
-
-.. automodule:: pype.tools.settings.settings.widgets
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.tools.settings.settings.widgets.anatomy\_types module
-----------------------------------------------------------
-
-.. automodule:: pype.tools.settings.settings.widgets.anatomy_types
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.settings.settings.widgets.base module
-------------------------------------------------
-
-.. automodule:: pype.tools.settings.settings.widgets.base
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.settings.settings.widgets.item\_types module
--------------------------------------------------------
-
-.. automodule:: pype.tools.settings.settings.widgets.item_types
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.settings.settings.widgets.lib module
------------------------------------------------
-
-.. automodule:: pype.tools.settings.settings.widgets.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.settings.settings.widgets.multiselection\_combobox module
---------------------------------------------------------------------
-
-.. automodule:: pype.tools.settings.settings.widgets.multiselection_combobox
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.settings.settings.widgets.tests module
--------------------------------------------------
-
-.. automodule:: pype.tools.settings.settings.widgets.tests
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.settings.settings.widgets.widgets module
----------------------------------------------------
-
-.. automodule:: pype.tools.settings.settings.widgets.widgets
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.settings.settings.widgets.window module
---------------------------------------------------
-
-.. automodule:: pype.tools.settings.settings.widgets.window
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.settings.settings.widgets.tests.rst b/docs/source/pype.tools.settings.settings.widgets.tests.rst
deleted file mode 100644
index fe8d6dabef..0000000000
--- a/docs/source/pype.tools.settings.settings.widgets.tests.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.settings.settings.widgets.tests module
-=================================================
-
-.. automodule:: pype.tools.settings.settings.widgets.tests
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.settings.settings.widgets.widgets.rst b/docs/source/pype.tools.settings.settings.widgets.widgets.rst
deleted file mode 100644
index 238e584ac3..0000000000
--- a/docs/source/pype.tools.settings.settings.widgets.widgets.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.settings.settings.widgets.widgets module
-===================================================
-
-.. automodule:: pype.tools.settings.settings.widgets.widgets
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.settings.settings.widgets.window.rst b/docs/source/pype.tools.settings.settings.widgets.window.rst
deleted file mode 100644
index d67678012f..0000000000
--- a/docs/source/pype.tools.settings.settings.widgets.window.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.settings.settings.widgets.window module
-==================================================
-
-.. automodule:: pype.tools.settings.settings.widgets.window
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.app.rst b/docs/source/pype.tools.standalonepublish.app.rst
deleted file mode 100644
index 74776b80fe..0000000000
--- a/docs/source/pype.tools.standalonepublish.app.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.app module
-=======================================
-
-.. automodule:: pype.tools.standalonepublish.app
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.publish.rst b/docs/source/pype.tools.standalonepublish.publish.rst
deleted file mode 100644
index 47ad57e7fb..0000000000
--- a/docs/source/pype.tools.standalonepublish.publish.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.publish module
-===========================================
-
-.. automodule:: pype.tools.standalonepublish.publish
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.rst b/docs/source/pype.tools.standalonepublish.rst
deleted file mode 100644
index 5ca8194b61..0000000000
--- a/docs/source/pype.tools.standalonepublish.rst
+++ /dev/null
@@ -1,34 +0,0 @@
-pype.tools.standalonepublish package
-====================================
-
-.. automodule:: pype.tools.standalonepublish
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.tools.standalonepublish.widgets
-
-Submodules
-----------
-
-pype.tools.standalonepublish.app module
----------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.app
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.publish module
--------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.publish
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.model_asset.rst b/docs/source/pype.tools.standalonepublish.widgets.model_asset.rst
deleted file mode 100644
index 84d0ca2d93..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.model_asset.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.model\_asset module
-========================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.model_asset
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.model_filter_proxy_exact_match.rst b/docs/source/pype.tools.standalonepublish.widgets.model_filter_proxy_exact_match.rst
deleted file mode 100644
index 0c3ae79b99..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.model_filter_proxy_exact_match.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.model\_filter\_proxy\_exact\_match module
-==============================================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.model_filter_proxy_exact_match
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.model_filter_proxy_recursive_sort.rst b/docs/source/pype.tools.standalonepublish.widgets.model_filter_proxy_recursive_sort.rst
deleted file mode 100644
index b828b75030..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.model_filter_proxy_recursive_sort.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.model\_filter\_proxy\_recursive\_sort module
-=================================================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.model_filter_proxy_recursive_sort
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.model_node.rst b/docs/source/pype.tools.standalonepublish.widgets.model_node.rst
deleted file mode 100644
index 4789b14501..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.model_node.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.model\_node module
-=======================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.model_node
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.model_tasks_template.rst b/docs/source/pype.tools.standalonepublish.widgets.model_tasks_template.rst
deleted file mode 100644
index dbee838530..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.model_tasks_template.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.model\_tasks\_template module
-==================================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.model_tasks_template
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.model_tree.rst b/docs/source/pype.tools.standalonepublish.widgets.model_tree.rst
deleted file mode 100644
index 38eecb095a..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.model_tree.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.model\_tree module
-=======================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.model_tree
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.model_tree_view_deselectable.rst b/docs/source/pype.tools.standalonepublish.widgets.model_tree_view_deselectable.rst
deleted file mode 100644
index 9afb505113..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.model_tree_view_deselectable.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.model\_tree\_view\_deselectable module
-===========================================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.model_tree_view_deselectable
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.resources.rst b/docs/source/pype.tools.standalonepublish.widgets.resources.rst
deleted file mode 100644
index a0eddae63e..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.resources.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.resources package
-======================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.resources
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.rst b/docs/source/pype.tools.standalonepublish.widgets.rst
deleted file mode 100644
index 65bbcb62fc..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.rst
+++ /dev/null
@@ -1,146 +0,0 @@
-pype.tools.standalonepublish.widgets package
-============================================
-
-.. automodule:: pype.tools.standalonepublish.widgets
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.tools.standalonepublish.widgets.resources
-
-Submodules
-----------
-
-pype.tools.standalonepublish.widgets.model\_asset module
---------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.model_asset
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.model\_filter\_proxy\_exact\_match module
-------------------------------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.model_filter_proxy_exact_match
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.model\_filter\_proxy\_recursive\_sort module
----------------------------------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.model_filter_proxy_recursive_sort
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.model\_node module
--------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.model_node
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.model\_tasks\_template module
-------------------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.model_tasks_template
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.model\_tree module
--------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.model_tree
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.model\_tree\_view\_deselectable module
----------------------------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.model_tree_view_deselectable
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.widget\_asset module
----------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_asset
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.widget\_component\_item module
--------------------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_component_item
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.widget\_components module
---------------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_components
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.widget\_components\_list module
---------------------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_components_list
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.widget\_drop\_empty module
----------------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_drop_empty
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.widget\_drop\_frame module
----------------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_drop_frame
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.widget\_family module
-----------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_family
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.widget\_family\_desc module
-----------------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_family_desc
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.tools.standalonepublish.widgets.widget\_shadow module
-----------------------------------------------------------
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_shadow
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.widget_asset.rst b/docs/source/pype.tools.standalonepublish.widgets.widget_asset.rst
deleted file mode 100644
index 51a3763628..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.widget_asset.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.widget\_asset module
-=========================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_asset
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.widget_component_item.rst b/docs/source/pype.tools.standalonepublish.widgets.widget_component_item.rst
deleted file mode 100644
index 3495fdf192..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.widget_component_item.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.widget\_component\_item module
-===================================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_component_item
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.widget_components.rst b/docs/source/pype.tools.standalonepublish.widgets.widget_components.rst
deleted file mode 100644
index be7c19af9f..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.widget_components.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.widget\_components module
-==============================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_components
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.widget_components_list.rst b/docs/source/pype.tools.standalonepublish.widgets.widget_components_list.rst
deleted file mode 100644
index 051efe07fe..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.widget_components_list.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.widget\_components\_list module
-====================================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_components_list
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.widget_drop_empty.rst b/docs/source/pype.tools.standalonepublish.widgets.widget_drop_empty.rst
deleted file mode 100644
index b5b0a6acac..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.widget_drop_empty.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.widget\_drop\_empty module
-===============================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_drop_empty
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.widget_drop_frame.rst b/docs/source/pype.tools.standalonepublish.widgets.widget_drop_frame.rst
deleted file mode 100644
index 6b3e3690e0..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.widget_drop_frame.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.widget\_drop\_frame module
-===============================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_drop_frame
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.widget_family.rst b/docs/source/pype.tools.standalonepublish.widgets.widget_family.rst
deleted file mode 100644
index 24c9d5496f..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.widget_family.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.widget\_family module
-==========================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_family
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.widget_family_desc.rst b/docs/source/pype.tools.standalonepublish.widgets.widget_family_desc.rst
deleted file mode 100644
index 5a7f92177f..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.widget_family_desc.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.widget\_family\_desc module
-================================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_family_desc
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.standalonepublish.widgets.widget_shadow.rst b/docs/source/pype.tools.standalonepublish.widgets.widget_shadow.rst
deleted file mode 100644
index 19f5c22198..0000000000
--- a/docs/source/pype.tools.standalonepublish.widgets.widget_shadow.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.standalonepublish.widgets.widget\_shadow module
-==========================================================
-
-.. automodule:: pype.tools.standalonepublish.widgets.widget_shadow
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.tray.pype_tray.rst b/docs/source/pype.tools.tray.pype_tray.rst
deleted file mode 100644
index 9fc49c5763..0000000000
--- a/docs/source/pype.tools.tray.pype_tray.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.tray.pype\_tray module
-=================================
-
-.. automodule:: pype.tools.tray.pype_tray
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.tray.rst b/docs/source/pype.tools.tray.rst
deleted file mode 100644
index b28059d170..0000000000
--- a/docs/source/pype.tools.tray.rst
+++ /dev/null
@@ -1,15 +0,0 @@
-pype.tools.tray package
-=======================
-
-.. automodule:: pype.tools.tray
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-.. toctree::
- :maxdepth: 10
-
- pype.tools.tray.pype_tray
diff --git a/docs/source/pype.tools.workfiles.app.rst b/docs/source/pype.tools.workfiles.app.rst
deleted file mode 100644
index a3a46b8a07..0000000000
--- a/docs/source/pype.tools.workfiles.app.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.workfiles.app module
-===============================
-
-.. automodule:: pype.tools.workfiles.app
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.workfiles.model.rst b/docs/source/pype.tools.workfiles.model.rst
deleted file mode 100644
index 44cea32b97..0000000000
--- a/docs/source/pype.tools.workfiles.model.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.workfiles.model module
-=================================
-
-.. automodule:: pype.tools.workfiles.model
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.tools.workfiles.rst b/docs/source/pype.tools.workfiles.rst
deleted file mode 100644
index 147c4cebbe..0000000000
--- a/docs/source/pype.tools.workfiles.rst
+++ /dev/null
@@ -1,17 +0,0 @@
-pype.tools.workfiles package
-============================
-
-.. automodule:: pype.tools.workfiles
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-.. toctree::
- :maxdepth: 10
-
- pype.tools.workfiles.app
- pype.tools.workfiles.model
- pype.tools.workfiles.view
diff --git a/docs/source/pype.tools.workfiles.view.rst b/docs/source/pype.tools.workfiles.view.rst
deleted file mode 100644
index acd32ed250..0000000000
--- a/docs/source/pype.tools.workfiles.view.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.tools.workfiles.view module
-================================
-
-.. automodule:: pype.tools.workfiles.view
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.backports.configparser.helpers.rst b/docs/source/pype.vendor.backports.configparser.helpers.rst
deleted file mode 100644
index 8d44d0a8c4..0000000000
--- a/docs/source/pype.vendor.backports.configparser.helpers.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.backports.configparser.helpers module
-=================================================
-
-.. automodule:: pype.vendor.backports.configparser.helpers
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.backports.configparser.rst b/docs/source/pype.vendor.backports.configparser.rst
deleted file mode 100644
index 4f778a4a87..0000000000
--- a/docs/source/pype.vendor.backports.configparser.rst
+++ /dev/null
@@ -1,18 +0,0 @@
-pype.vendor.backports.configparser package
-==========================================
-
-.. automodule:: pype.vendor.backports.configparser
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.vendor.backports.configparser.helpers module
--------------------------------------------------
-
-.. automodule:: pype.vendor.backports.configparser.helpers
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.backports.functools_lru_cache.rst b/docs/source/pype.vendor.backports.functools_lru_cache.rst
deleted file mode 100644
index 26f2746cec..0000000000
--- a/docs/source/pype.vendor.backports.functools_lru_cache.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.backports.functools\_lru\_cache module
-==================================================
-
-.. automodule:: pype.vendor.backports.functools_lru_cache
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.backports.rst b/docs/source/pype.vendor.backports.rst
deleted file mode 100644
index ff9efc29c5..0000000000
--- a/docs/source/pype.vendor.backports.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-pype.vendor.backports package
-=============================
-
-.. automodule:: pype.vendor.backports
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.vendor.backports.configparser
-
-Submodules
-----------
-
-pype.vendor.backports.functools\_lru\_cache module
---------------------------------------------------
-
-.. automodule:: pype.vendor.backports.functools_lru_cache
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.builtins.rst b/docs/source/pype.vendor.builtins.rst
deleted file mode 100644
index e21fb768ed..0000000000
--- a/docs/source/pype.vendor.builtins.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.builtins package
-============================
-
-.. automodule:: pype.vendor.builtins
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.capture.rst b/docs/source/pype.vendor.capture.rst
deleted file mode 100644
index d42e073fb5..0000000000
--- a/docs/source/pype.vendor.capture.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.capture module
-==========================
-
-.. automodule:: pype.vendor.capture
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.capture_gui.accordion.rst b/docs/source/pype.vendor.capture_gui.accordion.rst
deleted file mode 100644
index cca228f151..0000000000
--- a/docs/source/pype.vendor.capture_gui.accordion.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.capture\_gui.accordion module
-=========================================
-
-.. automodule:: pype.vendor.capture_gui.accordion
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.capture_gui.app.rst b/docs/source/pype.vendor.capture_gui.app.rst
deleted file mode 100644
index 291296834e..0000000000
--- a/docs/source/pype.vendor.capture_gui.app.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.capture\_gui.app module
-===================================
-
-.. automodule:: pype.vendor.capture_gui.app
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.capture_gui.colorpicker.rst b/docs/source/pype.vendor.capture_gui.colorpicker.rst
deleted file mode 100644
index c9e56500f2..0000000000
--- a/docs/source/pype.vendor.capture_gui.colorpicker.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.capture\_gui.colorpicker module
-===========================================
-
-.. automodule:: pype.vendor.capture_gui.colorpicker
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.capture_gui.lib.rst b/docs/source/pype.vendor.capture_gui.lib.rst
deleted file mode 100644
index e94a3bd196..0000000000
--- a/docs/source/pype.vendor.capture_gui.lib.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.capture\_gui.lib module
-===================================
-
-.. automodule:: pype.vendor.capture_gui.lib
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.capture_gui.plugin.rst b/docs/source/pype.vendor.capture_gui.plugin.rst
deleted file mode 100644
index 2e8f58c873..0000000000
--- a/docs/source/pype.vendor.capture_gui.plugin.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.capture\_gui.plugin module
-======================================
-
-.. automodule:: pype.vendor.capture_gui.plugin
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.capture_gui.presets.rst b/docs/source/pype.vendor.capture_gui.presets.rst
deleted file mode 100644
index c81b4e1c23..0000000000
--- a/docs/source/pype.vendor.capture_gui.presets.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.capture\_gui.presets module
-=======================================
-
-.. automodule:: pype.vendor.capture_gui.presets
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.capture_gui.rst b/docs/source/pype.vendor.capture_gui.rst
deleted file mode 100644
index f7efce3501..0000000000
--- a/docs/source/pype.vendor.capture_gui.rst
+++ /dev/null
@@ -1,82 +0,0 @@
-pype.vendor.capture\_gui package
-================================
-
-.. automodule:: pype.vendor.capture_gui
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.vendor.capture_gui.vendor
-
-Submodules
-----------
-
-pype.vendor.capture\_gui.accordion module
------------------------------------------
-
-.. automodule:: pype.vendor.capture_gui.accordion
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.capture\_gui.app module
------------------------------------
-
-.. automodule:: pype.vendor.capture_gui.app
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.capture\_gui.colorpicker module
--------------------------------------------
-
-.. automodule:: pype.vendor.capture_gui.colorpicker
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.capture\_gui.lib module
------------------------------------
-
-.. automodule:: pype.vendor.capture_gui.lib
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.capture\_gui.plugin module
---------------------------------------
-
-.. automodule:: pype.vendor.capture_gui.plugin
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.capture\_gui.presets module
----------------------------------------
-
-.. automodule:: pype.vendor.capture_gui.presets
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.capture\_gui.tokens module
---------------------------------------
-
-.. automodule:: pype.vendor.capture_gui.tokens
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.capture\_gui.version module
----------------------------------------
-
-.. automodule:: pype.vendor.capture_gui.version
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.capture_gui.tokens.rst b/docs/source/pype.vendor.capture_gui.tokens.rst
deleted file mode 100644
index 9e144a4d37..0000000000
--- a/docs/source/pype.vendor.capture_gui.tokens.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.capture\_gui.tokens module
-======================================
-
-.. automodule:: pype.vendor.capture_gui.tokens
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.capture_gui.vendor.Qt.rst b/docs/source/pype.vendor.capture_gui.vendor.Qt.rst
deleted file mode 100644
index 447e6dd812..0000000000
--- a/docs/source/pype.vendor.capture_gui.vendor.Qt.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.capture\_gui.vendor.Qt module
-=========================================
-
-.. automodule:: pype.vendor.capture_gui.vendor.Qt
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.capture_gui.vendor.rst b/docs/source/pype.vendor.capture_gui.vendor.rst
deleted file mode 100644
index 0befc4bbb7..0000000000
--- a/docs/source/pype.vendor.capture_gui.vendor.rst
+++ /dev/null
@@ -1,18 +0,0 @@
-pype.vendor.capture\_gui.vendor package
-=======================================
-
-.. automodule:: pype.vendor.capture_gui.vendor
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.vendor.capture\_gui.vendor.Qt module
------------------------------------------
-
-.. automodule:: pype.vendor.capture_gui.vendor.Qt
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.capture_gui.version.rst b/docs/source/pype.vendor.capture_gui.version.rst
deleted file mode 100644
index 3f0cfbabfd..0000000000
--- a/docs/source/pype.vendor.capture_gui.version.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.capture\_gui.version module
-=======================================
-
-.. automodule:: pype.vendor.capture_gui.version
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.accessor.base.rst b/docs/source/pype.vendor.ftrack_api_old.accessor.base.rst
deleted file mode 100644
index 5155df82aa..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.accessor.base.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.accessor.base module
-=================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.accessor.base
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.accessor.disk.rst b/docs/source/pype.vendor.ftrack_api_old.accessor.disk.rst
deleted file mode 100644
index 3040fe18fd..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.accessor.disk.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.accessor.disk module
-=================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.accessor.disk
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.accessor.rst b/docs/source/pype.vendor.ftrack_api_old.accessor.rst
deleted file mode 100644
index 1f7b522460..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.accessor.rst
+++ /dev/null
@@ -1,34 +0,0 @@
-pype.vendor.ftrack\_api\_old.accessor package
-=============================================
-
-.. automodule:: pype.vendor.ftrack_api_old.accessor
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.vendor.ftrack\_api\_old.accessor.base module
--------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.accessor.base
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.accessor.disk module
--------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.accessor.disk
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.accessor.server module
----------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.accessor.server
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.accessor.server.rst b/docs/source/pype.vendor.ftrack_api_old.accessor.server.rst
deleted file mode 100644
index db835f99c4..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.accessor.server.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.accessor.server module
-===================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.accessor.server
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.attribute.rst b/docs/source/pype.vendor.ftrack_api_old.attribute.rst
deleted file mode 100644
index 54276ceb2a..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.attribute.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.attribute module
-=============================================
-
-.. automodule:: pype.vendor.ftrack_api_old.attribute
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.cache.rst b/docs/source/pype.vendor.ftrack_api_old.cache.rst
deleted file mode 100644
index 396bc5a1cd..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.cache.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.cache module
-=========================================
-
-.. automodule:: pype.vendor.ftrack_api_old.cache
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.collection.rst b/docs/source/pype.vendor.ftrack_api_old.collection.rst
deleted file mode 100644
index de911619fc..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.collection.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.collection module
-==============================================
-
-.. automodule:: pype.vendor.ftrack_api_old.collection
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.data.rst b/docs/source/pype.vendor.ftrack_api_old.data.rst
deleted file mode 100644
index 2f67185cee..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.data.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.data module
-========================================
-
-.. automodule:: pype.vendor.ftrack_api_old.data
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.entity.asset_version.rst b/docs/source/pype.vendor.ftrack_api_old.entity.asset_version.rst
deleted file mode 100644
index 7ad3d87fd9..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.entity.asset_version.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.entity.asset\_version module
-=========================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.asset_version
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.entity.base.rst b/docs/source/pype.vendor.ftrack_api_old.entity.base.rst
deleted file mode 100644
index b87428f817..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.entity.base.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.entity.base module
-===============================================
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.base
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.entity.component.rst b/docs/source/pype.vendor.ftrack_api_old.entity.component.rst
deleted file mode 100644
index 27901ab786..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.entity.component.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.entity.component module
-====================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.component
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.entity.factory.rst b/docs/source/pype.vendor.ftrack_api_old.entity.factory.rst
deleted file mode 100644
index caada5c3c8..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.entity.factory.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.entity.factory module
-==================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.factory
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.entity.job.rst b/docs/source/pype.vendor.ftrack_api_old.entity.job.rst
deleted file mode 100644
index 6f4ca18323..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.entity.job.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.entity.job module
-==============================================
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.job
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.entity.location.rst b/docs/source/pype.vendor.ftrack_api_old.entity.location.rst
deleted file mode 100644
index 2f0b380349..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.entity.location.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.entity.location module
-===================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.location
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.entity.note.rst b/docs/source/pype.vendor.ftrack_api_old.entity.note.rst
deleted file mode 100644
index c04e959402..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.entity.note.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.entity.note module
-===============================================
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.note
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.entity.project_schema.rst b/docs/source/pype.vendor.ftrack_api_old.entity.project_schema.rst
deleted file mode 100644
index 6332a2e523..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.entity.project_schema.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.entity.project\_schema module
-==========================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.project_schema
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.entity.rst b/docs/source/pype.vendor.ftrack_api_old.entity.rst
deleted file mode 100644
index bb43a7621b..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.entity.rst
+++ /dev/null
@@ -1,82 +0,0 @@
-pype.vendor.ftrack\_api\_old.entity package
-===========================================
-
-.. automodule:: pype.vendor.ftrack_api_old.entity
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.vendor.ftrack\_api\_old.entity.asset\_version module
----------------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.asset_version
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.entity.base module
------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.base
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.entity.component module
-----------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.component
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.entity.factory module
---------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.factory
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.entity.job module
-----------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.job
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.entity.location module
----------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.location
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.entity.note module
------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.note
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.entity.project\_schema module
-----------------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.project_schema
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.entity.user module
------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.user
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.entity.user.rst b/docs/source/pype.vendor.ftrack_api_old.entity.user.rst
deleted file mode 100644
index c0fe6574a6..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.entity.user.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.entity.user module
-===============================================
-
-.. automodule:: pype.vendor.ftrack_api_old.entity.user
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.event.base.rst b/docs/source/pype.vendor.ftrack_api_old.event.base.rst
deleted file mode 100644
index 74b86e3bb2..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.event.base.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.event.base module
-==============================================
-
-.. automodule:: pype.vendor.ftrack_api_old.event.base
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.event.expression.rst b/docs/source/pype.vendor.ftrack_api_old.event.expression.rst
deleted file mode 100644
index 860678797b..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.event.expression.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.event.expression module
-====================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.event.expression
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.event.hub.rst b/docs/source/pype.vendor.ftrack_api_old.event.hub.rst
deleted file mode 100644
index d09d52eedf..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.event.hub.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.event.hub module
-=============================================
-
-.. automodule:: pype.vendor.ftrack_api_old.event.hub
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.event.rst b/docs/source/pype.vendor.ftrack_api_old.event.rst
deleted file mode 100644
index 2db27bf7f8..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.event.rst
+++ /dev/null
@@ -1,50 +0,0 @@
-pype.vendor.ftrack\_api\_old.event package
-==========================================
-
-.. automodule:: pype.vendor.ftrack_api_old.event
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.vendor.ftrack\_api\_old.event.base module
-----------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.event.base
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.event.expression module
-----------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.event.expression
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.event.hub module
----------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.event.hub
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.event.subscriber module
-----------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.event.subscriber
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.event.subscription module
-------------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.event.subscription
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.event.subscriber.rst b/docs/source/pype.vendor.ftrack_api_old.event.subscriber.rst
deleted file mode 100644
index a9bd13aabc..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.event.subscriber.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.event.subscriber module
-====================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.event.subscriber
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.event.subscription.rst b/docs/source/pype.vendor.ftrack_api_old.event.subscription.rst
deleted file mode 100644
index 423fa9a688..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.event.subscription.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.event.subscription module
-======================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.event.subscription
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.exception.rst b/docs/source/pype.vendor.ftrack_api_old.exception.rst
deleted file mode 100644
index 54dbeeac36..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.exception.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.exception module
-=============================================
-
-.. automodule:: pype.vendor.ftrack_api_old.exception
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.formatter.rst b/docs/source/pype.vendor.ftrack_api_old.formatter.rst
deleted file mode 100644
index 75a23eefca..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.formatter.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.formatter module
-=============================================
-
-.. automodule:: pype.vendor.ftrack_api_old.formatter
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.inspection.rst b/docs/source/pype.vendor.ftrack_api_old.inspection.rst
deleted file mode 100644
index 2b8849b3d0..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.inspection.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.inspection module
-==============================================
-
-.. automodule:: pype.vendor.ftrack_api_old.inspection
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.logging.rst b/docs/source/pype.vendor.ftrack_api_old.logging.rst
deleted file mode 100644
index a10fa10c26..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.logging.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.logging module
-===========================================
-
-.. automodule:: pype.vendor.ftrack_api_old.logging
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.operation.rst b/docs/source/pype.vendor.ftrack_api_old.operation.rst
deleted file mode 100644
index a1d9d606f8..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.operation.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.operation module
-=============================================
-
-.. automodule:: pype.vendor.ftrack_api_old.operation
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.plugin.rst b/docs/source/pype.vendor.ftrack_api_old.plugin.rst
deleted file mode 100644
index 0f26c705d2..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.plugin.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.plugin module
-==========================================
-
-.. automodule:: pype.vendor.ftrack_api_old.plugin
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.query.rst b/docs/source/pype.vendor.ftrack_api_old.query.rst
deleted file mode 100644
index 5cf5aba0e4..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.query.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.query module
-=========================================
-
-.. automodule:: pype.vendor.ftrack_api_old.query
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.resource_identifier_transformer.base.rst b/docs/source/pype.vendor.ftrack_api_old.resource_identifier_transformer.base.rst
deleted file mode 100644
index dccf51ea71..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.resource_identifier_transformer.base.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.resource\_identifier\_transformer.base module
-==========================================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.resource_identifier_transformer.base
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.resource_identifier_transformer.rst b/docs/source/pype.vendor.ftrack_api_old.resource_identifier_transformer.rst
deleted file mode 100644
index 342ecd9321..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.resource_identifier_transformer.rst
+++ /dev/null
@@ -1,18 +0,0 @@
-pype.vendor.ftrack\_api\_old.resource\_identifier\_transformer package
-======================================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.resource_identifier_transformer
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.vendor.ftrack\_api\_old.resource\_identifier\_transformer.base module
---------------------------------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.resource_identifier_transformer.base
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.rst b/docs/source/pype.vendor.ftrack_api_old.rst
deleted file mode 100644
index 51d0a29357..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.rst
+++ /dev/null
@@ -1,126 +0,0 @@
-pype.vendor.ftrack\_api\_old package
-====================================
-
-.. automodule:: pype.vendor.ftrack_api_old
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.vendor.ftrack_api_old.accessor
- pype.vendor.ftrack_api_old.entity
- pype.vendor.ftrack_api_old.event
- pype.vendor.ftrack_api_old.resource_identifier_transformer
- pype.vendor.ftrack_api_old.structure
-
-Submodules
-----------
-
-pype.vendor.ftrack\_api\_old.attribute module
----------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.attribute
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.cache module
------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.cache
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.collection module
-----------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.collection
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.data module
-----------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.data
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.exception module
----------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.exception
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.formatter module
----------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.formatter
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.inspection module
-----------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.inspection
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.logging module
--------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.logging
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.operation module
----------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.operation
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.plugin module
-------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.plugin
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.query module
------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.query
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.session module
--------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.session
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.symbol module
-------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.symbol
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.session.rst b/docs/source/pype.vendor.ftrack_api_old.session.rst
deleted file mode 100644
index beecdeb6af..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.session.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.session module
-===========================================
-
-.. automodule:: pype.vendor.ftrack_api_old.session
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.structure.base.rst b/docs/source/pype.vendor.ftrack_api_old.structure.base.rst
deleted file mode 100644
index 617d8aaed7..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.structure.base.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.structure.base module
-==================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.structure.base
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.structure.entity_id.rst b/docs/source/pype.vendor.ftrack_api_old.structure.entity_id.rst
deleted file mode 100644
index ab6fd0997a..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.structure.entity_id.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.structure.entity\_id module
-========================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.structure.entity_id
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.structure.id.rst b/docs/source/pype.vendor.ftrack_api_old.structure.id.rst
deleted file mode 100644
index 6b887b7917..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.structure.id.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.structure.id module
-================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.structure.id
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.structure.origin.rst b/docs/source/pype.vendor.ftrack_api_old.structure.origin.rst
deleted file mode 100644
index 8ad5fbdc11..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.structure.origin.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.structure.origin module
-====================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.structure.origin
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.structure.rst b/docs/source/pype.vendor.ftrack_api_old.structure.rst
deleted file mode 100644
index 2402430589..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.structure.rst
+++ /dev/null
@@ -1,50 +0,0 @@
-pype.vendor.ftrack\_api\_old.structure package
-==============================================
-
-.. automodule:: pype.vendor.ftrack_api_old.structure
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.vendor.ftrack\_api\_old.structure.base module
---------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.structure.base
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.structure.entity\_id module
---------------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.structure.entity_id
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.structure.id module
-------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.structure.id
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.structure.origin module
-----------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.structure.origin
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.ftrack\_api\_old.structure.standard module
-------------------------------------------------------
-
-.. automodule:: pype.vendor.ftrack_api_old.structure.standard
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.structure.standard.rst b/docs/source/pype.vendor.ftrack_api_old.structure.standard.rst
deleted file mode 100644
index 800201084f..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.structure.standard.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.structure.standard module
-======================================================
-
-.. automodule:: pype.vendor.ftrack_api_old.structure.standard
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.ftrack_api_old.symbol.rst b/docs/source/pype.vendor.ftrack_api_old.symbol.rst
deleted file mode 100644
index bc358d374a..0000000000
--- a/docs/source/pype.vendor.ftrack_api_old.symbol.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.ftrack\_api\_old.symbol module
-==========================================
-
-.. automodule:: pype.vendor.ftrack_api_old.symbol
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.pysync.rst b/docs/source/pype.vendor.pysync.rst
deleted file mode 100644
index fbe5b33fb7..0000000000
--- a/docs/source/pype.vendor.pysync.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.vendor.pysync module
-=========================
-
-.. automodule:: pype.vendor.pysync
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.vendor.rst b/docs/source/pype.vendor.rst
deleted file mode 100644
index 23aa17f7ab..0000000000
--- a/docs/source/pype.vendor.rst
+++ /dev/null
@@ -1,37 +0,0 @@
-pype.vendor package
-===================
-
-.. automodule:: pype.vendor
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
- :maxdepth: 6
-
- pype.vendor.backports
- pype.vendor.builtins
- pype.vendor.capture_gui
- pype.vendor.ftrack_api_old
-
-Submodules
-----------
-
-pype.vendor.capture module
---------------------------
-
-.. automodule:: pype.vendor.capture
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.vendor.pysync module
--------------------------
-
-.. automodule:: pype.vendor.pysync
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.version.rst b/docs/source/pype.version.rst
deleted file mode 100644
index 7ec69dc423..0000000000
--- a/docs/source/pype.version.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.version module
-===================
-
-.. automodule:: pype.version
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.widgets.message_window.rst b/docs/source/pype.widgets.message_window.rst
deleted file mode 100644
index 60be203837..0000000000
--- a/docs/source/pype.widgets.message_window.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.widgets.message\_window module
-===================================
-
-.. automodule:: pype.widgets.message_window
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.widgets.popup.rst b/docs/source/pype.widgets.popup.rst
deleted file mode 100644
index 7186ff48de..0000000000
--- a/docs/source/pype.widgets.popup.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.widgets.popup module
-=========================
-
-.. automodule:: pype.widgets.popup
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.widgets.project_settings.rst b/docs/source/pype.widgets.project_settings.rst
deleted file mode 100644
index 9589cf5479..0000000000
--- a/docs/source/pype.widgets.project_settings.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-pype.widgets.project\_settings module
-=====================================
-
-.. automodule:: pype.widgets.project_settings
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/pype.widgets.rst b/docs/source/pype.widgets.rst
deleted file mode 100644
index 1f09318b67..0000000000
--- a/docs/source/pype.widgets.rst
+++ /dev/null
@@ -1,34 +0,0 @@
-pype.widgets package
-====================
-
-.. automodule:: pype.widgets
- :members:
- :undoc-members:
- :show-inheritance:
-
-Submodules
-----------
-
-pype.widgets.message\_window module
------------------------------------
-
-.. automodule:: pype.widgets.message_window
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.widgets.popup module
--------------------------
-
-.. automodule:: pype.widgets.popup
- :members:
- :undoc-members:
- :show-inheritance:
-
-pype.widgets.project\_settings module
--------------------------------------
-
-.. automodule:: pype.widgets.project_settings
- :members:
- :undoc-members:
- :show-inheritance:
diff --git a/docs/source/readme.rst b/docs/source/readme.rst
index 823c0df3c8..138b88bba8 100644
--- a/docs/source/readme.rst
+++ b/docs/source/readme.rst
@@ -1,2 +1,6 @@
-.. title:: Pype Readme
+===============
+OpenPype Readme
+===============
+
.. include:: ../../README.md
+ :parser: myst_parser.sphinx_
diff --git a/igniter/__init__.py b/igniter/__init__.py
index aa1b1d209e..085a825860 100644
--- a/igniter/__init__.py
+++ b/igniter/__init__.py
@@ -19,21 +19,41 @@ if "OpenPypeVersion" not in sys.modules:
sys.modules["OpenPypeVersion"] = OpenPypeVersion
+def _get_qt_app():
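+    """Return a ``QApplication`` instance, creating one if none exists.
+
+    High DPI attributes and the scale factor rounding policy must be set
+    before the application instance is created, so they are applied here
+    first when a new application has to be constructed.
+    """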
+ from qtpy import QtWidgets, QtCore
+
+ app = QtWidgets.QApplication.instance()
+ if app is not None:
+ return app
+
+ for attr_name in (
+ "AA_EnableHighDpiScaling",
+ "AA_UseHighDpiPixmaps",
+ ):
+ attr = getattr(QtCore.Qt, attr_name, None)
+ if attr is not None:
+ QtWidgets.QApplication.setAttribute(attr)
+
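+    # Respect an explicit QT_SCALE_FACTOR_ROUNDING_POLICY from the
+    # environment; otherwise default to "PassThrough" scaling.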
+ policy = os.getenv("QT_SCALE_FACTOR_ROUNDING_POLICY")
+ if (
+ hasattr(QtWidgets.QApplication, "setHighDpiScaleFactorRoundingPolicy")
+ and not policy
+ ):
+ QtWidgets.QApplication.setHighDpiScaleFactorRoundingPolicy(
+ QtCore.Qt.HighDpiScaleFactorRoundingPolicy.PassThrough
+ )
+
+ return QtWidgets.QApplication(sys.argv)
+
+
def open_dialog():
"""Show Igniter dialog."""
if os.getenv("OPENPYPE_HEADLESS_MODE"):
print("!!! Can't open dialog in headless mode. Exiting.")
sys.exit(1)
- from qtpy import QtWidgets, QtCore
from .install_dialog import InstallDialog
- scale_attr = getattr(QtCore.Qt, "AA_EnableHighDpiScaling", None)
- if scale_attr is not None:
- QtWidgets.QApplication.setAttribute(scale_attr)
-
- app = QtWidgets.QApplication.instance()
- if not app:
- app = QtWidgets.QApplication(sys.argv)
+ app = _get_qt_app()
d = InstallDialog()
d.open()
@@ -47,16 +67,10 @@ def open_update_window(openpype_version):
if os.getenv("OPENPYPE_HEADLESS_MODE"):
print("!!! Can't open dialog in headless mode. Exiting.")
sys.exit(1)
- from qtpy import QtWidgets, QtCore
+
from .update_window import UpdateWindow
- scale_attr = getattr(QtCore.Qt, "AA_EnableHighDpiScaling", None)
- if scale_attr is not None:
- QtWidgets.QApplication.setAttribute(scale_attr)
-
- app = QtWidgets.QApplication.instance()
- if not app:
- app = QtWidgets.QApplication(sys.argv)
+ app = _get_qt_app()
d = UpdateWindow(version=openpype_version)
d.open()
@@ -71,16 +85,10 @@ def show_message_dialog(title, message):
if os.getenv("OPENPYPE_HEADLESS_MODE"):
print("!!! Can't open dialog in headless mode. Exiting.")
sys.exit(1)
- from qtpy import QtWidgets, QtCore
+
from .message_dialog import MessageDialog
- scale_attr = getattr(QtCore.Qt, "AA_EnableHighDpiScaling", None)
- if scale_attr is not None:
- QtWidgets.QApplication.setAttribute(scale_attr)
-
- app = QtWidgets.QApplication.instance()
- if not app:
- app = QtWidgets.QApplication(sys.argv)
+ app = _get_qt_app()
dialog = MessageDialog(title, message)
dialog.open()
diff --git a/igniter/bootstrap_repos.py b/igniter/bootstrap_repos.py
index 6c7c834062..e7b440f812 100644
--- a/igniter/bootstrap_repos.py
+++ b/igniter/bootstrap_repos.py
@@ -25,7 +25,8 @@ from .user_settings import (
from .tools import (
get_openpype_global_settings,
get_openpype_path_from_settings,
- get_expected_studio_version_str
+ get_expected_studio_version_str,
+ get_local_openpype_path_from_settings
)
@@ -34,6 +35,29 @@ LOG_WARNING = 1
LOG_ERROR = 3
+def sanitize_long_path(path):
+ """Sanitize long paths (260 characters) when on Windows.
+
+ Long paths are not capatible with ZipFile or reading a file, so we can
+ shorten the path to use.
+
+ Args:
+ path (str): path to either directory or file.
+
+ Returns:
+ str: sanitized path
+ """
+ if platform.system().lower() != "windows":
+ return path
+ path = os.path.abspath(path)
+
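+    # The "\\?\" prefix (or "\\?\UNC\" for network paths) tells the
+    # Windows API to bypass the 260 character MAX_PATH limit.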
+ if path.startswith("\\\\"):
+ path = "\\\\?\\UNC\\" + path[2:]
+ else:
+ path = "\\\\?\\" + path
+ return path
+
+
def sha256sum(filename):
"""Calculate sha256 for content of the file.
@@ -53,6 +77,13 @@ def sha256sum(filename):
return h.hexdigest()
+class ZipFileLongPaths(ZipFile):
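+    """ZipFile which sanitizes extraction target paths for Windows long paths."""
+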
+ def _extract_member(self, member, targetpath, pwd):
+ return ZipFile._extract_member(
+ self, member, sanitize_long_path(targetpath), pwd
+ )
+
+
class OpenPypeVersion(semver.VersionInfo):
"""Class for storing information about OpenPype version.
@@ -61,6 +92,8 @@ class OpenPypeVersion(semver.VersionInfo):
"""
path = None
+
+ _local_openpype_path = None
# this should match any string complying with https://semver.org/
_VERSION_REGEX = re.compile(r"(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)\.(?P0|[1-9]\d*)(?:-(?P[a-zA-Z\d\-.]*))?(?:\+(?P[a-zA-Z\d\-.]*))?") # noqa: E501
_installed_version = None
@@ -289,6 +322,23 @@ class OpenPypeVersion(semver.VersionInfo):
"""
return os.getenv("OPENPYPE_PATH")
+ @classmethod
+ def get_local_openpype_path(cls):
+ """Path to unzipped versions.
+
+    By default this is the user data directory, but it can be
+    overridden by settings.
+ """
+ if cls._local_openpype_path:
+ return cls._local_openpype_path
+
+ settings = get_openpype_global_settings(os.environ["OPENPYPE_MONGO"])
+ data_dir = get_local_openpype_path_from_settings(settings)
+ if not data_dir:
+ data_dir = Path(user_data_dir("openpype", "pypeclub"))
+ cls._local_openpype_path = data_dir
+ return data_dir
+
@classmethod
def openpype_path_is_set(cls):
"""Path to OpenPype zip directory is set."""
@@ -319,9 +369,8 @@ class OpenPypeVersion(semver.VersionInfo):
list: of compatible versions available on the machine.
"""
- # DEPRECATED: backwards compatible way to look for versions in root
- dir_to_search = Path(user_data_dir("openpype", "pypeclub"))
- versions = OpenPypeVersion.get_versions_from_directory(dir_to_search)
+ dir_to_search = cls.get_local_openpype_path()
+ versions = cls.get_versions_from_directory(dir_to_search)
return list(sorted(set(versions)))
@@ -533,17 +582,15 @@ class BootstrapRepos:
"""
# vendor and app used to construct user data dir
- self._vendor = "pypeclub"
- self._app = "openpype"
+ self._message = message
self._log = log.getLogger(str(__class__))
- self.data_dir = Path(user_data_dir(self._app, self._vendor))
+ self.set_data_dir(None)
self.secure_registry = OpenPypeSecureRegistry("mongodb")
self.registry = OpenPypeSettingsRegistry()
self.zip_filter = [".pyc", "__pycache__"]
self.openpype_filter = [
- "openpype", "schema", "LICENSE"
+ "openpype", "LICENSE"
]
- self._message = message
# dummy progress reporter
def empty_progress(x: int):
@@ -554,6 +601,13 @@ class BootstrapRepos:
progress_callback = empty_progress
self._progress_callback = progress_callback
+ def set_data_dir(self, data_dir):
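+        """Set the directory used to store installed OpenPype versions.
+
+        Falls back to the default user data directory when no override
+        is provided.
+        """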
+ if not data_dir:
+ self.data_dir = Path(user_data_dir("openpype", "pypeclub"))
+ else:
+ self._print(f"overriding local folder: {data_dir}")
+ self.data_dir = data_dir
+
@staticmethod
def get_version_path_from_list(
version: str, version_list: list) -> Union[Path, None]:
@@ -756,7 +810,7 @@ class BootstrapRepos:
def _create_openpype_zip(self, zip_path: Path, openpype_path: Path) -> None:
"""Pack repositories and OpenPype into zip.
- We are using :mod:`zipfile` instead :meth:`shutil.make_archive`
+        We are using :class:`ZipFile` instead of :meth:`shutil.make_archive`
because we need to decide what file and directories to include in zip
and what not. They are determined by :attr:`zip_filter` on file level
and :attr:`openpype_filter` on top level directory in OpenPype
@@ -810,7 +864,7 @@ class BootstrapRepos:
checksums.append(
(
- sha256sum(file.as_posix()),
+ sha256sum(sanitize_long_path(file.as_posix())),
file.resolve().relative_to(openpype_root)
)
)
@@ -934,7 +988,9 @@ class BootstrapRepos:
if platform.system().lower() == "windows":
file_name = file_name.replace("/", "\\")
try:
- current = sha256sum((path / file_name).as_posix())
+ current = sha256sum(
+ sanitize_long_path((path / file_name).as_posix())
+ )
except FileNotFoundError:
return False, f"Missing file [ {file_name} ]"
@@ -1246,7 +1302,7 @@ class BootstrapRepos:
# extract zip there
self._print("Extracting zip to destination ...")
- with ZipFile(version.path, "r") as zip_ref:
+ with ZipFileLongPaths(version.path, "r") as zip_ref:
zip_ref.extractall(destination)
self._print(f"Installed as {version.path.stem}")
@@ -1362,7 +1418,7 @@ class BootstrapRepos:
# extract zip there
self._print("extracting zip to destination ...")
- with ZipFile(openpype_version.path, "r") as zip_ref:
+ with ZipFileLongPaths(openpype_version.path, "r") as zip_ref:
self._progress_callback(75)
zip_ref.extractall(destination)
self._progress_callback(100)
diff --git a/igniter/install_thread.py b/igniter/install_thread.py
index 4723e6adfb..1d55213de7 100644
--- a/igniter/install_thread.py
+++ b/igniter/install_thread.py
@@ -14,7 +14,11 @@ from .bootstrap_repos import (
OpenPypeVersion
)
-from .tools import validate_mongo_connection
+from .tools import (
+ get_openpype_global_settings,
+ get_local_openpype_path_from_settings,
+ validate_mongo_connection
+)
class InstallThread(QtCore.QThread):
@@ -80,6 +84,15 @@ class InstallThread(QtCore.QThread):
return
os.environ["OPENPYPE_MONGO"] = self._mongo
+ if not validate_mongo_connection(self._mongo):
+ self.message.emit(f"Cannot connect to {self._mongo}", True)
+ self._set_result(-1)
+ return
+
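+        # Apply the studio-configured local versions directory, if any.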
+ global_settings = get_openpype_global_settings(self._mongo)
+ data_dir = get_local_openpype_path_from_settings(global_settings)
+ bs.set_data_dir(data_dir)
+
self.message.emit(
f"Detecting installed OpenPype versions in {bs.data_dir}",
False)
diff --git a/igniter/tools.py b/igniter/tools.py
index 79235b2329..9dea203f0c 100644
--- a/igniter/tools.py
+++ b/igniter/tools.py
@@ -40,7 +40,7 @@ def should_add_certificate_path_to_mongo_url(mongo_url):
add_certificate = False
# Check if url 'ssl' or 'tls' are set to 'true'
for key in ("ssl", "tls"):
- if key in query and "true" in query["ssl"]:
+ if key in query and "true" in query[key]:
add_certificate = True
break
@@ -73,7 +73,7 @@ def validate_mongo_connection(cnx: str) -> (bool, str):
}
# Add certificate path if should be required
if should_add_certificate_path_to_mongo_url(cnx):
- kwargs["ssl_ca_certs"] = certifi.where()
+ kwargs["tlsCAFile"] = certifi.where()
try:
client = MongoClient(cnx, **kwargs)
@@ -147,7 +147,7 @@ def get_openpype_global_settings(url: str) -> dict:
"""
kwargs = {}
if should_add_certificate_path_to_mongo_url(url):
- kwargs["ssl_ca_certs"] = certifi.where()
+ kwargs["tlsCAFile"] = certifi.where()
try:
# Create mongo connection
@@ -188,6 +188,26 @@ def get_openpype_path_from_settings(settings: dict) -> Union[str, None]:
return next((path for path in paths if os.path.exists(path)), None)
+def get_local_openpype_path_from_settings(settings: dict) -> Union[str, None]:
+ """Get OpenPype local path from global settings.
+
+    Used to download and unzip OpenPype versions.
+
+    Args:
+        settings (dict): settings from DB.
+
+    Returns:
+        Path: local OpenPype directory, or None if not set.
+ """
+ path = (
+ settings
+ .get("local_openpype_path", {})
+ .get(platform.system().lower())
+ )
+ if path:
+ return Path(path)
+ return None
+
+
def get_expected_studio_version_str(
staging=False, global_settings=None
) -> str:
diff --git a/igniter/update_thread.py b/igniter/update_thread.py
index e98c95f892..0223477d0a 100644
--- a/igniter/update_thread.py
+++ b/igniter/update_thread.py
@@ -48,6 +48,8 @@ class UpdateThread(QtCore.QThread):
"""
bs = BootstrapRepos(
progress_callback=self.set_progress, message=self.message)
+
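+        # Install into the (possibly settings-overridden) local versions dir.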
+ bs.set_data_dir(OpenPypeVersion.get_local_openpype_path())
version_path = bs.install_version(self._openpype_version)
self._set_result(version_path)
diff --git a/inno_setup.iss b/inno_setup.iss
index 418bedbd4d..d9a41d22ee 100644
--- a/inno_setup.iss
+++ b/inno_setup.iss
@@ -36,7 +36,7 @@ WizardStyle=modern
Name: "english"; MessagesFile: "compiler:Default.isl"
[Tasks]
-Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked
+Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"
[InstallDelete]
; clean everything in previous installation folder
@@ -53,4 +53,3 @@ Name: "{autodesktop}\{#MyAppName} {#AppVer}"; Filename: "{app}\openpype_gui.exe"
[Run]
Filename: "{app}\openpype_gui.exe"; Description: "{cm:LaunchProgram,OpenPype}"; Flags: nowait postinstall skipifsilent
-
diff --git a/openpype/__init__.py b/openpype/__init__.py
index 810664707a..e6b77b1853 100644
--- a/openpype/__init__.py
+++ b/openpype/__init__.py
@@ -3,3 +3,5 @@ import os
PACKAGE_DIR = os.path.dirname(os.path.abspath(__file__))
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
+
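+# Enables AYON server mode; OpenPype-only CLI commands check this flag.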
+AYON_SERVER_ENABLED = os.environ.get("USE_AYON_SERVER") == "1"
diff --git a/openpype/action.py b/openpype/action.py
deleted file mode 100644
index 6114c65fd4..0000000000
--- a/openpype/action.py
+++ /dev/null
@@ -1,135 +0,0 @@
-import warnings
-import functools
-import pyblish.api
-
-
-class ActionDeprecatedWarning(DeprecationWarning):
- pass
-
-
-def deprecated(new_destination):
- """Mark functions as deprecated.
-
- It will result in a warning being emitted when the function is used.
- """
-
- func = None
- if callable(new_destination):
- func = new_destination
- new_destination = None
-
- def _decorator(decorated_func):
- if new_destination is None:
- warning_message = (
- " Please check content of deprecated function to figure out"
- " possible replacement."
- )
- else:
- warning_message = " Please replace your usage with '{}'.".format(
- new_destination
- )
-
- @functools.wraps(decorated_func)
- def wrapper(*args, **kwargs):
- warnings.simplefilter("always", ActionDeprecatedWarning)
- warnings.warn(
- (
- "Call to deprecated function '{}'"
- "\nFunction was moved or removed.{}"
- ).format(decorated_func.__name__, warning_message),
- category=ActionDeprecatedWarning,
- stacklevel=4
- )
- return decorated_func(*args, **kwargs)
- return wrapper
-
- if func is None:
- return _decorator
- return _decorator(func)
-
-
-@deprecated("openpype.pipeline.publish.get_errored_instances_from_context")
-def get_errored_instances_from_context(context, plugin=None):
- """
- Deprecated:
- Since 3.14.* will be removed in 3.16.* or later.
- """
-
- from openpype.pipeline.publish import get_errored_instances_from_context
-
- return get_errored_instances_from_context(context, plugin=plugin)
-
-
-@deprecated("openpype.pipeline.publish.get_errored_plugins_from_context")
-def get_errored_plugins_from_data(context):
- """
- Deprecated:
- Since 3.14.* will be removed in 3.16.* or later.
- """
-
- from openpype.pipeline.publish import get_errored_plugins_from_context
-
- return get_errored_plugins_from_context(context)
-
-
-class RepairAction(pyblish.api.Action):
- """Repairs the action
-
- To process the repairing this requires a static `repair(instance)` method
- is available on the plugin.
-
- Deprecated:
- 'RepairAction' and 'RepairContextAction' were moved to
- 'openpype.pipeline.publish' please change you imports.
- There is no "reasonable" way hot mark these classes as deprecated
- to show warning of wrong import. Deprecated since 3.14.* will be
- removed in 3.16.*
-
- """
- label = "Repair"
- on = "failed" # This action is only available on a failed plug-in
- icon = "wrench" # Icon from Awesome Icon
-
- def process(self, context, plugin):
-
- if not hasattr(plugin, "repair"):
- raise RuntimeError("Plug-in does not have repair method.")
-
- # Get the errored instances
- self.log.info("Finding failed instances..")
- errored_instances = get_errored_instances_from_context(context,
- plugin=plugin)
- for instance in errored_instances:
- plugin.repair(instance)
-
-
-class RepairContextAction(pyblish.api.Action):
- """Repairs the action
-
- To process the repairing this requires a static `repair(instance)` method
- is available on the plugin.
-
- Deprecated:
- 'RepairAction' and 'RepairContextAction' were moved to
- 'openpype.pipeline.publish' please change you imports.
- There is no "reasonable" way hot mark these classes as deprecated
- to show warning of wrong import. Deprecated since 3.14.* will be
- removed in 3.16.*
-
- """
- label = "Repair"
- on = "failed" # This action is only available on a failed plug-in
-
- def process(self, context, plugin):
-
- if not hasattr(plugin, "repair"):
- raise RuntimeError("Plug-in does not have repair method.")
-
- # Get the errored instances
- self.log.info("Finding failed instances..")
- errored_plugins = get_errored_plugins_from_data(context)
-
- # Apply pyblish.logic to get the instances for the plug-in
- if plugin in errored_plugins:
- self.log.info("Attempting fix ...")
- plugin.repair(context)
diff --git a/openpype/cli.py b/openpype/cli.py
index 54af42920d..7422f32f13 100644
--- a/openpype/cli.py
+++ b/openpype/cli.py
@@ -5,11 +5,25 @@ import sys
import code
import click
-# import sys
+from openpype import AYON_SERVER_ENABLED
from .pype_commands import PypeCommands
-@click.group(invoke_without_command=True)
+class AliasedGroup(click.Group):
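+    """Click group which allows invoking commands under alias names."""
+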
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._aliases = {}
+
+ def set_alias(self, src_name, dst_name):
+ self._aliases[dst_name] = src_name
+
+ def get_command(self, ctx, cmd_name):
+ if cmd_name in self._aliases:
+ cmd_name = self._aliases[cmd_name]
+ return super().get_command(ctx, cmd_name)
+
+
+@click.group(cls=AliasedGroup, invoke_without_command=True)
@click.pass_context
@click.option("--use-version",
expose_value=False, help="use specified version")
@@ -33,7 +47,11 @@ def main(ctx):
if ctx.invoked_subcommand is None:
# Print help if headless mode is used
- if os.environ.get("OPENPYPE_HEADLESS_MODE") == "1":
+ if AYON_SERVER_ENABLED:
+ is_headless = os.getenv("AYON_HEADLESS_MODE") == "1"
+ else:
+ is_headless = os.getenv("OPENPYPE_HEADLESS_MODE") == "1"
+ if is_headless:
print(ctx.get_help())
sys.exit(0)
else:
@@ -44,6 +62,9 @@ def main(ctx):
@click.option("-d", "--dev", is_flag=True, help="Settings in Dev mode")
def settings(dev):
"""Show Pype Settings UI."""
+
+ if AYON_SERVER_ENABLED:
+ raise RuntimeError("AYON does not support 'settings' command.")
PypeCommands().launch_settings_gui(dev)
@@ -58,16 +79,20 @@ def tray():
@PypeCommands.add_modules
-@main.group(help="Run command line arguments of OpenPype modules")
+@main.group(help="Run command line arguments of OpenPype addons")
@click.pass_context
def module(ctx):
- """Module specific commands created dynamically.
+ """Addon specific commands created dynamically.
- These commands are generated dynamically by currently loaded addon/modules.
+ These commands are generated dynamically by currently loaded addons.
"""
pass
+# Add 'addon' as alias for module
+main.set_alias("module", "addon")
+
+
@main.command()
@click.option("--ftrack-url", envvar="FTRACK_SERVER",
help="Ftrack server url")
@@ -93,6 +118,8 @@ def eventserver(ftrack_url,
on linux and window service).
"""
+ if AYON_SERVER_ENABLED:
+ raise RuntimeError("AYON does not support 'eventserver' command.")
PypeCommands().launch_eventservercli(
ftrack_url,
ftrack_user,
@@ -117,6 +144,10 @@ def webpublisherwebserver(executable, upload_dir, host=None, port=None):
Expect "pype.club" user created on Ftrack.
"""
+ if AYON_SERVER_ENABLED:
+ raise RuntimeError(
+ "AYON does not support 'webpublisherwebserver' command."
+ )
PypeCommands().launch_webpublisher_webservercli(
upload_dir=upload_dir,
executable=executable,
@@ -165,122 +196,10 @@ def publish(paths, targets, gui):
PypeCommands.publish(list(paths), targets, gui)
-@main.command()
-@click.argument("path")
-@click.option("-h", "--host", help="Host")
-@click.option("-u", "--user", help="User email address")
-@click.option("-p", "--project", help="Project")
-@click.option("-t", "--targets", help="Targets", default=None,
- multiple=True)
-def remotepublishfromapp(project, path, host, user=None, targets=None):
- """Start CLI publishing.
-
- Publish collects json from paths provided as an argument.
- More than one path is allowed.
- """
-
- PypeCommands.remotepublishfromapp(
- project, path, host, user, targets=targets
- )
-
-
-@main.command()
-@click.argument("path")
-@click.option("-u", "--user", help="User email address")
-@click.option("-p", "--project", help="Project")
-@click.option("-t", "--targets", help="Targets", default=None,
- multiple=True)
-def remotepublish(project, path, user=None, targets=None):
- """Start CLI publishing.
-
- Publish collects json from paths provided as an argument.
- More than one path is allowed.
- """
-
- PypeCommands.remotepublish(project, path, user, targets=targets)
-
-
-@main.command()
-@click.option("-p", "--project", required=True,
- help="name of project asset is under")
-@click.option("-a", "--asset", required=True,
- help="name of asset to which we want to copy textures")
-@click.option("--path", required=True,
- help="path where textures are found",
- type=click.Path(exists=True))
-def texturecopy(project, asset, path):
- """Copy specified textures to provided asset path.
-
- It validates if project and asset exists. Then it will use speedcopy to
- copy all textures found in all directories under --path to destination
- folder, determined by template texture in anatomy. I will use source
- filename and automatically rise version number on directory.
-
- Result will be copied without directory structure so it will be flat then.
- Nothing is written to database.
- """
-
- PypeCommands().texture_copy(project, asset, path)
-
-
-@main.command(context_settings={"ignore_unknown_options": True})
-@click.option("--app", help="Registered application name")
-@click.option("--project", help="Project name",
- default=lambda: os.environ.get('AVALON_PROJECT', ''))
-@click.option("--asset", help="Asset name",
- default=lambda: os.environ.get('AVALON_ASSET', ''))
-@click.option("--task", help="Task name",
- default=lambda: os.environ.get('AVALON_TASK', ''))
-@click.option("--tools", help="List of tools to add")
-@click.option("--user", help="Pype user name",
- default=lambda: os.environ.get('OPENPYPE_USERNAME', ''))
-@click.option("-fs",
- "--ftrack-server",
- help="Registered application name",
- default=lambda: os.environ.get('FTRACK_SERVER', ''))
-@click.option("-fu",
- "--ftrack-user",
- help="Registered application name",
- default=lambda: os.environ.get('FTRACK_API_USER', ''))
-@click.option("-fk",
- "--ftrack-key",
- help="Registered application name",
- default=lambda: os.environ.get('FTRACK_API_KEY', ''))
-@click.argument('arguments', nargs=-1)
-def launch(app, project, asset, task,
- ftrack_server, ftrack_user, ftrack_key, tools, arguments, user):
- """Launch registered application name in Pype context.
-
- You can define applications in pype-config toml files. Project, asset name
- and task name must be provided (even if they are not used by app itself).
- Optionally you can specify ftrack credentials if needed.
-
- ARGUMENTS are passed to launched application.
-
- """
- # TODO: this needs to switch for Settings
- if ftrack_server:
- os.environ["FTRACK_SERVER"] = ftrack_server
-
- if ftrack_server:
- os.environ["FTRACK_API_USER"] = ftrack_user
-
- if ftrack_server:
- os.environ["FTRACK_API_KEY"] = ftrack_key
-
- if user:
- os.environ["OPENPYPE_USERNAME"] = user
-
- # test required
- if not project or not asset or not task:
- print("!!! Missing required arguments")
- return
-
- PypeCommands().run_application(app, project, asset, task, tools, arguments)
-
-
@main.command(context_settings={"ignore_unknown_options": True})
def projectmanager():
+ if AYON_SERVER_ENABLED:
+ raise RuntimeError("AYON does not support 'projectmanager' command.")
PypeCommands().launch_project_manager()
@@ -371,19 +290,29 @@ def run(script):
"--setup_only",
help="Only create dbs, do not run tests",
default=None)
+@click.option("--mongo_url",
+ help="MongoDB for testing.",
+ default=None)
def runtests(folder, mark, pyargs, test_data_folder, persist, app_variant,
- timeout, setup_only):
+ timeout, setup_only, mongo_url):
"""Run all automatic tests after proper initialization via start.py"""
PypeCommands().run_tests(folder, mark, pyargs, test_data_folder,
- persist, app_variant, timeout, setup_only)
+ persist, app_variant, timeout, setup_only,
+ mongo_url)
-@main.command()
+@main.command(help="DEPRECATED - run sync server")
+@click.pass_context
@click.option("-a", "--active_site", required=True,
- help="Name of active stie")
-def syncserver(active_site):
+ help="Name of active site")
+def syncserver(ctx, active_site):
"""Run sync site server in background.
+
+ Deprecated:
+ This command is deprecated and will be removed in future versions.
+ Use '~/openpype_console module sync_server syncservice' instead.
+
+ Details:
Some Site Sync use cases need to expose site to another one.
For example if majority of artists work in studio, they are not using
SS at all, but if you want to expose published assets to 'studio' site
@@ -397,7 +326,12 @@ def syncserver(active_site):
var OPENPYPE_LOCAL_ID set to 'active_site'.
"""
- PypeCommands().syncserver(active_site)
+ if AYON_SERVER_ENABLED:
+ raise RuntimeError("AYON does not support 'syncserver' command.")
+
+ from openpype.modules.sync_server.sync_server_module import (
+ syncservice)
+ ctx.invoke(syncservice, active_site=active_site)
@main.command()
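The deprecated command above now delegates to the sync server addon's own CLI entry through click's context. A self-contained sketch of this forwarding pattern, with stand-in command bodies (not the real sync_server module):

import click

@click.group()
def cli():
    pass

@cli.command()
@click.option("--active_site", required=True)
def syncservice(active_site):
    # The real implementation lives on the addon; this is a stand-in.
    click.echo("sync service running for site: {}".format(active_site))

@cli.command(help="DEPRECATED - use 'syncservice' instead")
@click.pass_context
@click.option("--active_site", required=True)
def syncserver(ctx, active_site):
    # 'ctx.invoke' calls the other command's callback directly with the
    # given parameters, so the deprecated entry point stays functional.
    ctx.invoke(syncservice, active_site=active_site)

if __name__ == "__main__":
    cli()
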
@@ -409,6 +343,8 @@ def repack_version(directory):
recalculating file checksums. It will try to use version detected in
directory name.
"""
+ if AYON_SERVER_ENABLED:
+ raise RuntimeError("AYON does not support 'repack-version' command.")
PypeCommands().repack_version(directory)
@@ -420,6 +356,9 @@ def repack_version(directory):
"--dbonly", help="Store only Database data", default=False, is_flag=True)
def pack_project(project, dirpath, dbonly):
"""Create a package of project with all files and database dump."""
+
+ if AYON_SERVER_ENABLED:
+ raise RuntimeError("AYON does not support 'pack-project' command.")
PypeCommands().pack_project(project, dirpath, dbonly)
@@ -432,6 +371,8 @@ def pack_project(project, dirpath, dbonly):
"--dbonly", help="Store only Database data", default=False, is_flag=True)
def unpack_project(zipfile, root, dbonly):
"""Create a package of project with all files and database dump."""
+ if AYON_SERVER_ENABLED:
+ raise RuntimeError("AYON does not support 'unpack-project' command.")
PypeCommands().unpack_project(zipfile, root, dbonly)
@@ -446,9 +387,17 @@ def interactive():
Executable 'openpype_gui' on Windows won't work.
"""
- from openpype.version import __version__
+ if AYON_SERVER_ENABLED:
+ version = os.environ["AYON_VERSION"]
+ banner = (
+ f"AYON launcher {version}\nPython {sys.version} on {sys.platform}"
+ )
+ else:
+ from openpype.version import __version__
- banner = f"OpenPype {__version__}\nPython {sys.version} on {sys.platform}"
+ banner = (
+ f"OpenPype {__version__}\nPython {sys.version} on {sys.platform}"
+ )
code.interact(banner)
@@ -457,11 +406,13 @@ def interactive():
is_flag=True, default=False)
def version(build):
"""Print OpenPype version."""
+ if AYON_SERVER_ENABLED:
+ print(os.environ["AYON_VERSION"])
+ return
from openpype.version import __version__
from igniter.bootstrap_repos import BootstrapRepos, OpenPypeVersion
from pathlib import Path
- import os
if getattr(sys, 'frozen', False):
local_version = BootstrapRepos.get_version(
diff --git a/openpype/client/entities.py b/openpype/client/entities.py
index adbdd7a47c..5d9654c611 100644
--- a/openpype/client/entities.py
+++ b/openpype/client/entities.py
@@ -1,1553 +1,6 @@
-"""Unclear if these will have public functions like these.
+from openpype import AYON_SERVER_ENABLED
-Goal is that most of functions here are called on (or with) an object
-that has project name as a context (e.g. on 'ProjectEntity'?).
-
-+ We will need more specific functions doing very specific queries really fast.
-"""
-
-import re
-import collections
-
-import six
-from bson.objectid import ObjectId
-
-from .mongo import get_project_database, get_project_connection
-
-PatternType = type(re.compile(""))
-
-
-def _prepare_fields(fields, required_fields=None):
- if not fields:
- return None
-
- output = {
- field: True
- for field in fields
- }
- if "_id" not in output:
- output["_id"] = True
-
- if required_fields:
- for key in required_fields:
- output[key] = True
- return output
-
-
-def convert_id(in_id):
- """Helper function for conversion of id from string to ObjectId.
-
- Args:
- in_id (Union[str, ObjectId, Any]): Entity id that should be converted
- to right type for queries.
-
- Returns:
- Union[ObjectId, Any]: Converted ids to ObjectId or in type.
- """
-
- if isinstance(in_id, six.string_types):
- return ObjectId(in_id)
- return in_id
-
-
-def convert_ids(in_ids):
- """Helper function for conversion of ids from string to ObjectId.
-
- Args:
- in_ids (Iterable[Union[str, ObjectId, Any]]): List of entity ids that
- should be converted to right type for queries.
-
- Returns:
- List[ObjectId]: Converted ids to ObjectId.
- """
-
- _output = set()
- for in_id in in_ids:
- if in_id is not None:
- _output.add(convert_id(in_id))
- return list(_output)
-
-
-def get_projects(active=True, inactive=False, fields=None):
- """Yield all project entity documents.
-
- Args:
- active (Optional[bool]): Include active projects. Defaults to True.
- inactive (Optional[bool]): Include inactive projects.
- Defaults to False.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Yields:
- dict: Project entity data which can be reduced to specified 'fields'.
- None is returned if project with specified filters was not found.
- """
- mongodb = get_project_database()
- for project_name in mongodb.collection_names():
- if project_name in ("system.indexes",):
- continue
- project_doc = get_project(
- project_name, active=active, inactive=inactive, fields=fields
- )
- if project_doc is not None:
- yield project_doc
-
-
-def get_project(project_name, active=True, inactive=True, fields=None):
- """Return project entity document by project name.
-
- Args:
- project_name (str): Name of project.
- active (Optional[bool]): Allow active project. Defaults to True.
- inactive (Optional[bool]): Allow inactive project. Defaults to True.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[Dict, None]: Project entity data which can be reduced to
- specified 'fields'. None is returned if project with specified
- filters was not found.
- """
- # Skip if both are disabled
- if not active and not inactive:
- return None
-
- query_filter = {"type": "project"}
- # Keep query untouched if both should be available
- if active and inactive:
- pass
-
- # Add filter to keep only active
- elif active:
- query_filter["$or"] = [
- {"data.active": {"$exists": False}},
- {"data.active": True},
- ]
-
- # Add filter to keep only inactive
- elif inactive:
- query_filter["$or"] = [
- {"data.active": {"$exists": False}},
- {"data.active": False},
- ]
-
- conn = get_project_connection(project_name)
- return conn.find_one(query_filter, _prepare_fields(fields))
-
-
-def get_whole_project(project_name):
- """Receive all documents from project.
-
- Helper that can be used to get all document from whole project. For example
- for backups etc.
-
- Returns:
- Cursor: Query cursor as iterable which returns all documents from
- project collection.
- """
-
- conn = get_project_connection(project_name)
- return conn.find({})
-
-
-def get_asset_by_id(project_name, asset_id, fields=None):
- """Receive asset data by its id.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- asset_id (Union[str, ObjectId]): Asset's id.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[Dict, None]: Asset entity data which can be reduced to
- specified 'fields'. None is returned if asset with specified
- filters was not found.
- """
-
- asset_id = convert_id(asset_id)
- if not asset_id:
- return None
-
- query_filter = {"type": "asset", "_id": asset_id}
- conn = get_project_connection(project_name)
- return conn.find_one(query_filter, _prepare_fields(fields))
-
-
-def get_asset_by_name(project_name, asset_name, fields=None):
- """Receive asset data by its name.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- asset_name (str): Asset's name.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[Dict, None]: Asset entity data which can be reduced to
- specified 'fields'. None is returned if asset with specified
- filters was not found.
- """
-
- if not asset_name:
- return None
-
- query_filter = {"type": "asset", "name": asset_name}
- conn = get_project_connection(project_name)
- return conn.find_one(query_filter, _prepare_fields(fields))
-
-
-# NOTE this could be just public function?
-# - any better variable name instead of 'standard'?
-# - same approach can be used for rest of types
-def _get_assets(
- project_name,
- asset_ids=None,
- asset_names=None,
- parent_ids=None,
- standard=True,
- archived=False,
- fields=None
-):
- """Assets for specified project by passed filters.
-
- Passed filters (ids and names) are always combined so all conditions must
- match.
-
- To receive all assets from project just keep filters empty.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should
- be found.
- asset_names (Iterable[str]): Name assets that should be found.
- parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids.
- standard (bool): Query standard assets (type 'asset').
- archived (bool): Query archived assets (type 'archived_asset').
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Cursor: Query cursor as iterable which returns asset documents matching
- passed filters.
- """
-
- asset_types = []
- if standard:
- asset_types.append("asset")
- if archived:
- asset_types.append("archived_asset")
-
- if not asset_types:
- return []
-
- if len(asset_types) == 1:
- query_filter = {"type": asset_types[0]}
- else:
- query_filter = {"type": {"$in": asset_types}}
-
- if asset_ids is not None:
- asset_ids = convert_ids(asset_ids)
- if not asset_ids:
- return []
- query_filter["_id"] = {"$in": asset_ids}
-
- if asset_names is not None:
- if not asset_names:
- return []
- query_filter["name"] = {"$in": list(asset_names)}
-
- if parent_ids is not None:
- parent_ids = convert_ids(parent_ids)
- if not parent_ids:
- return []
- query_filter["data.visualParent"] = {"$in": parent_ids}
-
- conn = get_project_connection(project_name)
-
- return conn.find(query_filter, _prepare_fields(fields))
-
-
-def get_assets(
- project_name,
- asset_ids=None,
- asset_names=None,
- parent_ids=None,
- archived=False,
- fields=None
-):
- """Assets for specified project by passed filters.
-
- Passed filters (ids and names) are always combined so all conditions must
- match.
-
- To receive all assets from project just keep filters empty.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should
- be found.
- asset_names (Iterable[str]): Name assets that should be found.
- parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids.
- archived (bool): Add also archived assets.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Cursor: Query cursor as iterable which returns asset documents matching
- passed filters.
- """
-
- return _get_assets(
- project_name,
- asset_ids,
- asset_names,
- parent_ids,
- True,
- archived,
- fields
- )
-
-
-def get_archived_assets(
- project_name,
- asset_ids=None,
- asset_names=None,
- parent_ids=None,
- fields=None
-):
- """Archived assets for specified project by passed filters.
-
- Passed filters (ids and names) are always combined so all conditions must
- match.
-
- To receive all archived assets from project just keep filters empty.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should
- be found.
- asset_names (Iterable[str]): Name assets that should be found.
- parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Cursor: Query cursor as iterable which returns asset documents matching
- passed filters.
- """
-
- return _get_assets(
- project_name, asset_ids, asset_names, parent_ids, False, True, fields
- )
-
-
-def get_asset_ids_with_subsets(project_name, asset_ids=None):
- """Find out which assets have existing subsets.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- asset_ids (Iterable[Union[str, ObjectId]]): Look only for entered
- asset ids.
-
- Returns:
- Iterable[ObjectId]: Asset ids that have existing subsets.
- """
-
- subset_query = {
- "type": "subset"
- }
- if asset_ids is not None:
- asset_ids = convert_ids(asset_ids)
- if not asset_ids:
- return []
- subset_query["parent"] = {"$in": asset_ids}
-
- conn = get_project_connection(project_name)
- result = conn.aggregate([
- {
- "$match": subset_query
- },
- {
- "$group": {
- "_id": "$parent",
- "count": {"$sum": 1}
- }
- }
- ])
- asset_ids_with_subsets = []
- for item in result:
- asset_id = item["_id"]
- count = item["count"]
- if count > 0:
- asset_ids_with_subsets.append(asset_id)
- return asset_ids_with_subsets
-
-
-def get_subset_by_id(project_name, subset_id, fields=None):
- """Single subset entity data by its id.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- subset_id (Union[str, ObjectId]): Id of subset which should be found.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[Dict, None]: Subset entity data which can be reduced to
- specified 'fields'. None is returned if subset with specified
- filters was not found.
- """
-
- subset_id = convert_id(subset_id)
- if not subset_id:
- return None
-
- query_filters = {"type": "subset", "_id": subset_id}
- conn = get_project_connection(project_name)
- return conn.find_one(query_filters, _prepare_fields(fields))
-
-
-def get_subset_by_name(project_name, subset_name, asset_id, fields=None):
- """Single subset entity data by its name and its version id.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- subset_name (str): Name of subset.
- asset_id (Union[str, ObjectId]): Id of parent asset.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[Dict, None]: Subset entity data which can be reduced to
- specified 'fields'. None is returned if subset with specified
- filters was not found.
- """
- if not subset_name:
- return None
-
- asset_id = convert_id(asset_id)
- if not asset_id:
- return None
-
- query_filters = {
- "type": "subset",
- "name": subset_name,
- "parent": asset_id
- }
- conn = get_project_connection(project_name)
- return conn.find_one(query_filters, _prepare_fields(fields))
-
-
-def get_subsets(
- project_name,
- subset_ids=None,
- subset_names=None,
- asset_ids=None,
- names_by_asset_ids=None,
- archived=False,
- fields=None
-):
- """Subset entities data from one project filtered by entered filters.
-
- Filters are additive (all conditions must pass to return subset).
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- subset_ids (Iterable[Union[str, ObjectId]]): Subset ids that should be
- queried. Filter ignored if 'None' is passed.
- subset_names (Iterable[str]): Subset names that should be queried.
- Filter ignored if 'None' is passed.
- asset_ids (Iterable[Union[str, ObjectId]]): Asset ids under which
- should look for the subsets. Filter ignored if 'None' is passed.
- names_by_asset_ids (dict[ObjectId, List[str]]): Complex filtering
- using asset ids and list of subset names under the asset.
- archived (bool): Look for archived subsets too.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Cursor: Iterable cursor yielding all matching subsets.
- """
-
- subset_types = ["subset"]
- if archived:
- subset_types.append("archived_subset")
-
- if len(subset_types) == 1:
- query_filter = {"type": subset_types[0]}
- else:
- query_filter = {"type": {"$in": subset_types}}
-
- if asset_ids is not None:
- asset_ids = convert_ids(asset_ids)
- if not asset_ids:
- return []
- query_filter["parent"] = {"$in": asset_ids}
-
- if subset_ids is not None:
- subset_ids = convert_ids(subset_ids)
- if not subset_ids:
- return []
- query_filter["_id"] = {"$in": subset_ids}
-
- if subset_names is not None:
- if not subset_names:
- return []
- query_filter["name"] = {"$in": list(subset_names)}
-
- if names_by_asset_ids is not None:
- or_query = []
- for asset_id, names in names_by_asset_ids.items():
- if asset_id and names:
- or_query.append({
- "parent": convert_id(asset_id),
- "name": {"$in": list(names)}
- })
- if not or_query:
- return []
- query_filter["$or"] = or_query
-
- conn = get_project_connection(project_name)
- return conn.find(query_filter, _prepare_fields(fields))
-
-
-def get_subset_families(project_name, subset_ids=None):
- """Set of main families of subsets.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- subset_ids (Iterable[Union[str, ObjectId]]): Subset ids that should
- be queried. All subsets from project are used if 'None' is passed.
-
- Returns:
- set[str]: Main families of matching subsets.
- """
-
- subset_filter = {
- "type": "subset"
- }
- if subset_ids is not None:
- if not subset_ids:
- return set()
- subset_filter["_id"] = {"$in": list(subset_ids)}
-
- conn = get_project_connection(project_name)
- result = list(conn.aggregate([
- {"$match": subset_filter},
- {"$project": {
- "family": {"$arrayElemAt": ["$data.families", 0]}
- }},
- {"$group": {
- "_id": "family_group",
- "families": {"$addToSet": "$family"}
- }}
- ]))
- if result:
- return set(result[0]["families"])
- return set()
-
-
-def get_version_by_id(project_name, version_id, fields=None):
- """Single version entity data by its id.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- version_id (Union[str, ObjectId]): Id of version which should be found.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[Dict, None]: Version entity data which can be reduced to
- specified 'fields'. None is returned if version with specified
- filters was not found.
- """
-
- version_id = convert_id(version_id)
- if not version_id:
- return None
-
- query_filter = {
- "type": {"$in": ["version", "hero_version"]},
- "_id": version_id
- }
- conn = get_project_connection(project_name)
- return conn.find_one(query_filter, _prepare_fields(fields))
-
-
-def get_version_by_name(project_name, version, subset_id, fields=None):
- """Single version entity data by its name and subset id.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- version (int): name of version entity (its version).
- subset_id (Union[str, ObjectId]): Id of version which should be found.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[Dict, None]: Version entity data which can be reduced to
- specified 'fields'. None is returned if version with specified
- filters was not found.
- """
-
- subset_id = convert_id(subset_id)
- if not subset_id:
- return None
-
- conn = get_project_connection(project_name)
- query_filter = {
- "type": "version",
- "parent": subset_id,
- "name": version
- }
- return conn.find_one(query_filter, _prepare_fields(fields))
-
-
-def version_is_latest(project_name, version_id):
- """Is version the latest from its subset.
-
- Note:
- Hero versions are considered as latest.
-
- Todo:
- Maybe raise exception when version was not found?
-
- Args:
- project_name (str):Name of project where to look for queried entities.
- version_id (Union[str, ObjectId]): Version id which is checked.
-
- Returns:
- bool: True if is latest version from subset else False.
- """
-
- version_id = convert_id(version_id)
- if not version_id:
- return False
- version_doc = get_version_by_id(
- project_name, version_id, fields=["_id", "type", "parent"]
- )
- # What to do when version is not found?
- if not version_doc:
- return False
-
- if version_doc["type"] == "hero_version":
- return True
-
- last_version = get_last_version_by_subset_id(
- project_name, version_doc["parent"], fields=["_id"]
- )
- return last_version["_id"] == version_id
-
-
-def _get_versions(
- project_name,
- subset_ids=None,
- version_ids=None,
- versions=None,
- standard=True,
- hero=False,
- fields=None
-):
- version_types = []
- if standard:
- version_types.append("version")
-
- if hero:
- version_types.append("hero_version")
-
- if not version_types:
- return []
- elif len(version_types) == 1:
- query_filter = {"type": version_types[0]}
- else:
- query_filter = {"type": {"$in": version_types}}
-
- if subset_ids is not None:
- subset_ids = convert_ids(subset_ids)
- if not subset_ids:
- return []
- query_filter["parent"] = {"$in": subset_ids}
-
- if version_ids is not None:
- version_ids = convert_ids(version_ids)
- if not version_ids:
- return []
- query_filter["_id"] = {"$in": version_ids}
-
- if versions is not None:
- versions = list(versions)
- if not versions:
- return []
-
- if len(versions) == 1:
- query_filter["name"] = versions[0]
- else:
- query_filter["name"] = {"$in": versions}
-
- conn = get_project_connection(project_name)
-
- return conn.find(query_filter, _prepare_fields(fields))
-
-
-def get_versions(
- project_name,
- version_ids=None,
- subset_ids=None,
- versions=None,
- hero=False,
- fields=None
-):
- """Version entities data from one project filtered by entered filters.
-
- Filters are additive (all conditions must pass to return subset).
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- version_ids (Iterable[Union[str, ObjectId]]): Version ids that will
- be queried. Filter ignored if 'None' is passed.
- subset_ids (Iterable[str]): Subset ids that will be queried.
- Filter ignored if 'None' is passed.
- versions (Iterable[int]): Version names (as integers).
- Filter ignored if 'None' is passed.
- hero (bool): Look also for hero versions.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Cursor: Iterable cursor yielding all matching versions.
- """
-
- return _get_versions(
- project_name,
- subset_ids,
- version_ids,
- versions,
- standard=True,
- hero=hero,
- fields=fields
- )
-
-
-def get_hero_version_by_subset_id(project_name, subset_id, fields=None):
- """Hero version by subset id.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- subset_id (Union[str, ObjectId]): Subset id under which
- is hero version.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[Dict, None]: Hero version entity data which can be reduced to
- specified 'fields'. None is returned if hero version with specified
- filters was not found.
- """
-
- subset_id = convert_id(subset_id)
- if not subset_id:
- return None
-
- versions = list(_get_versions(
- project_name,
- subset_ids=[subset_id],
- standard=False,
- hero=True,
- fields=fields
- ))
- if versions:
- return versions[0]
- return None
-
-
-def get_hero_version_by_id(project_name, version_id, fields=None):
- """Hero version by its id.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- version_id (Union[str, ObjectId]): Hero version id.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[Dict, None]: Hero version entity data which can be reduced to
- specified 'fields'. None is returned if hero version with specified
- filters was not found.
- """
-
- version_id = convert_id(version_id)
- if not version_id:
- return None
-
- versions = list(_get_versions(
- project_name,
- version_ids=[version_id],
- standard=False,
- hero=True,
- fields=fields
- ))
- if versions:
- return versions[0]
- return None
-
-
-def get_hero_versions(
- project_name,
- subset_ids=None,
- version_ids=None,
- fields=None
-):
- """Hero version entities data from one project filtered by entered filters.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- subset_ids (Iterable[Union[str, ObjectId]]): Subset ids for which
- should look for hero versions. Filter ignored if 'None' is passed.
- version_ids (Iterable[Union[str, ObjectId]]): Hero version ids. Filter
- ignored if 'None' is passed.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Cursor|list: Iterable yielding hero versions matching passed filters.
- """
-
- return _get_versions(
- project_name,
- subset_ids,
- version_ids,
- standard=False,
- hero=True,
- fields=fields
- )
-
-
-def get_output_link_versions(project_name, version_id, fields=None):
- """Versions where passed version was used as input.
-
- Question:
- Not 100% sure about the usage of the function so the name and docstring
- maybe does not match what it does?
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- version_id (Union[str, ObjectId]): Version id which can be used
- as input link for other versions.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Iterable: Iterable cursor yielding versions that are used as input
- links for passed version.
- """
-
- version_id = convert_id(version_id)
- if not version_id:
- return []
-
- conn = get_project_connection(project_name)
- # Does make sense to look for hero versions?
- query_filter = {
- "type": "version",
- "data.inputLinks.id": version_id
- }
- return conn.find(query_filter, _prepare_fields(fields))
-
-
-def get_last_versions(project_name, subset_ids, active=None, fields=None):
- """Latest versions for entered subset_ids.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- subset_ids (Iterable[Union[str, ObjectId]]): List of subset ids.
- active (Optional[bool]): If True only active versions are returned.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- dict[ObjectId, int]: Key is subset id and value is last version name.
- """
-
- subset_ids = convert_ids(subset_ids)
- if not subset_ids:
- return {}
-
- if fields is not None:
- fields = list(fields)
- if not fields:
- return {}
-
- # Avoid double query if only name and _id are requested
- name_needed = False
- limit_query = False
- if fields:
- fields_s = set(fields)
- if "name" in fields_s:
- name_needed = True
- fields_s.remove("name")
-
- for field in ("_id", "parent"):
- if field in fields_s:
- fields_s.remove(field)
- limit_query = len(fields_s) == 0
-
- group_item = {
- "_id": "$parent",
- "_version_id": {"$last": "$_id"}
- }
- # Add name if name is needed (only for limit query)
- if name_needed:
- group_item["name"] = {"$last": "$name"}
-
- aggregate_filter = {
- "type": "version",
- "parent": {"$in": subset_ids}
- }
- if active is False:
- aggregate_filter["data.active"] = active
- elif active is True:
- aggregate_filter["$or"] = [
- {"data.active": {"$exists": 0}},
- {"data.active": active},
- ]
-
- aggregation_pipeline = [
- # Find all versions of those subsets
- {"$match": aggregate_filter},
- # Sorting versions all together
- {"$sort": {"name": 1}},
- # Group them by "parent", but only take the last
- {"$group": group_item}
- ]
-
- conn = get_project_connection(project_name)
- aggregate_result = conn.aggregate(aggregation_pipeline)
- if limit_query:
- output = {}
- for item in aggregate_result:
- subset_id = item["_id"]
- item_data = {"_id": item["_version_id"], "parent": subset_id}
- if name_needed:
- item_data["name"] = item["name"]
- output[subset_id] = item_data
- return output
-
- version_ids = [
- doc["_version_id"]
- for doc in aggregate_result
- ]
-
- fields = _prepare_fields(fields, ["parent"])
-
- version_docs = get_versions(
- project_name, version_ids=version_ids, fields=fields
- )
-
- return {
- version_doc["parent"]: version_doc
- for version_doc in version_docs
- }
-
-
-def get_last_version_by_subset_id(project_name, subset_id, fields=None):
- """Last version for passed subset id.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- subset_id (Union[str, ObjectId]): Id of version which should be found.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[Dict, None]: Version entity data which can be reduced to
- specified 'fields'. None is returned if version with specified
- filters was not found.
- """
-
- subset_id = convert_id(subset_id)
- if not subset_id:
- return None
-
- last_versions = get_last_versions(
- project_name, subset_ids=[subset_id], fields=fields
- )
- return last_versions.get(subset_id)
-
-
-def get_last_version_by_subset_name(
- project_name, subset_name, asset_id=None, asset_name=None, fields=None
-):
- """Last version for passed subset name under asset id/name.
-
- It is required to pass 'asset_id' or 'asset_name'. Asset id is recommended
- if is available.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- subset_name (str): Name of subset.
- asset_id (Union[str, ObjectId]): Asset id which is parent of passed
- subset name.
- asset_name (str): Asset name which is parent of passed subset name.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[Dict, None]: Version entity data which can be reduced to
- specified 'fields'. None is returned if version with specified
- filters was not found.
- """
-
- if not asset_id and not asset_name:
- return None
-
- if not asset_id:
- asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"])
- if not asset_doc:
- return None
- asset_id = asset_doc["_id"]
- subset_doc = get_subset_by_name(
- project_name, subset_name, asset_id, fields=["_id"]
- )
- if not subset_doc:
- return None
- return get_last_version_by_subset_id(
- project_name, subset_doc["_id"], fields=fields
- )
-
-
-def get_representation_by_id(project_name, representation_id, fields=None):
- """Representation entity data by its id.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- representation_id (Union[str, ObjectId]): Representation id.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[Dict, None]: Representation entity data which can be reduced to
- specified 'fields'. None is returned if representation with
- specified filters was not found.
- """
-
- if not representation_id:
- return None
-
- repre_types = ["representation", "archived_representation"]
- query_filter = {
- "type": {"$in": repre_types}
- }
- if representation_id is not None:
- query_filter["_id"] = convert_id(representation_id)
-
- conn = get_project_connection(project_name)
-
- return conn.find_one(query_filter, _prepare_fields(fields))
-
-
-def get_representation_by_name(
- project_name, representation_name, version_id, fields=None
-):
- """Representation entity data by its name and its version id.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- representation_name (str): Representation name.
- version_id (Union[str, ObjectId]): Id of parent version entity.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[dict[str, Any], None]: Representation entity data which can be
- reduced to specified 'fields'. None is returned if representation
- with specified filters was not found.
- """
-
- version_id = convert_id(version_id)
- if not version_id or not representation_name:
- return None
- repre_types = ["representation", "archived_representations"]
- query_filter = {
- "type": {"$in": repre_types},
- "name": representation_name,
- "parent": version_id
- }
-
- conn = get_project_connection(project_name)
- return conn.find_one(query_filter, _prepare_fields(fields))
-
-
-def _flatten_dict(data):
- flatten_queue = collections.deque()
- flatten_queue.append(data)
- output = {}
- while flatten_queue:
- item = flatten_queue.popleft()
- for key, value in item.items():
- if not isinstance(value, dict):
- output[key] = value
- continue
-
- tmp = {}
- for subkey, subvalue in value.items():
- new_key = "{}.{}".format(key, subkey)
- tmp[new_key] = subvalue
- flatten_queue.append(tmp)
- return output
-
-
-def _regex_filters(filters):
- output = []
- for key, value in filters.items():
- regexes = []
- a_values = []
- if isinstance(value, PatternType):
- regexes.append(value)
- elif isinstance(value, (list, tuple, set)):
- for item in value:
- if isinstance(item, PatternType):
- regexes.append(item)
- else:
- a_values.append(item)
- else:
- a_values.append(value)
-
- key_filters = []
- if len(a_values) == 1:
- key_filters.append({key: a_values[0]})
- elif a_values:
- key_filters.append({key: {"$in": a_values}})
-
- for regex in regexes:
- key_filters.append({key: {"$regex": regex}})
-
- if len(key_filters) == 1:
- output.append(key_filters[0])
- else:
- output.append({"$or": key_filters})
-
- return output
-
-
-def _get_representations(
- project_name,
- representation_ids,
- representation_names,
- version_ids,
- context_filters,
- names_by_version_ids,
- standard,
- archived,
- fields
-):
- default_output = []
- repre_types = []
- if standard:
- repre_types.append("representation")
- if archived:
- repre_types.append("archived_representation")
-
- if not repre_types:
- return default_output
-
- if len(repre_types) == 1:
- query_filter = {"type": repre_types[0]}
- else:
- query_filter = {"type": {"$in": repre_types}}
-
- if representation_ids is not None:
- representation_ids = convert_ids(representation_ids)
- if not representation_ids:
- return default_output
- query_filter["_id"] = {"$in": representation_ids}
-
- if representation_names is not None:
- if not representation_names:
- return default_output
- query_filter["name"] = {"$in": list(representation_names)}
-
- if version_ids is not None:
- version_ids = convert_ids(version_ids)
- if not version_ids:
- return default_output
- query_filter["parent"] = {"$in": version_ids}
-
- or_queries = []
- if names_by_version_ids is not None:
- or_query = []
- for version_id, names in names_by_version_ids.items():
- if version_id and names:
- or_query.append({
- "parent": convert_id(version_id),
- "name": {"$in": list(names)}
- })
- if not or_query:
- return default_output
- or_queries.append(or_query)
-
- if context_filters is not None:
- if not context_filters:
- return []
- _flatten_filters = _flatten_dict(context_filters)
- flatten_filters = {}
- for key, value in _flatten_filters.items():
- if not key.startswith("context"):
- key = "context.{}".format(key)
- flatten_filters[key] = value
-
- for item in _regex_filters(flatten_filters):
- for key, value in item.items():
- if key != "$or":
- query_filter[key] = value
-
- elif value:
- or_queries.append(value)
-
- if len(or_queries) == 1:
- query_filter["$or"] = or_queries[0]
- elif or_queries:
- and_query = []
- for or_query in or_queries:
- if isinstance(or_query, list):
- or_query = {"$or": or_query}
- and_query.append(or_query)
- query_filter["$and"] = and_query
-
- conn = get_project_connection(project_name)
-
- return conn.find(query_filter, _prepare_fields(fields))
-
-
-def get_representations(
- project_name,
- representation_ids=None,
- representation_names=None,
- version_ids=None,
- context_filters=None,
- names_by_version_ids=None,
- archived=False,
- standard=True,
- fields=None
-):
- """Representation entities data from one project filtered by filters.
-
- Filters are additive (all conditions must pass to return subset).
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- representation_ids (Iterable[Union[str, ObjectId]]): Representation ids
- used as filter. Filter ignored if 'None' is passed.
- representation_names (Iterable[str]): Representations names used
- as filter. Filter ignored if 'None' is passed.
- version_ids (Iterable[str]): Subset ids used as parent filter. Filter
- ignored if 'None' is passed.
- context_filters (Dict[str, List[str, PatternType]]): Filter by
- representation context fields.
- names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
- using version ids and list of names under the version.
- archived (bool): Output will also contain archived representations.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Cursor: Iterable cursor yielding all matching representations.
- """
-
- return _get_representations(
- project_name=project_name,
- representation_ids=representation_ids,
- representation_names=representation_names,
- version_ids=version_ids,
- context_filters=context_filters,
- names_by_version_ids=names_by_version_ids,
- standard=standard,
- archived=archived,
- fields=fields
- )
-
-
-def get_archived_representations(
- project_name,
- representation_ids=None,
- representation_names=None,
- version_ids=None,
- context_filters=None,
- names_by_version_ids=None,
- fields=None
-):
- """Archived representation entities data from project with applied filters.
-
- Filters are additive (all conditions must pass to return subset).
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- representation_ids (Iterable[Union[str, ObjectId]]): Representation ids
- used as filter. Filter ignored if 'None' is passed.
- representation_names (Iterable[str]): Representations names used
- as filter. Filter ignored if 'None' is passed.
- version_ids (Iterable[str]): Subset ids used as parent filter. Filter
- ignored if 'None' is passed.
- context_filters (Dict[str, List[str, PatternType]]): Filter by
- representation context fields.
- names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering
- using version ids and list of names under the version.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Cursor: Iterable cursor yielding all matching representations.
- """
-
- return _get_representations(
- project_name=project_name,
- representation_ids=representation_ids,
- representation_names=representation_names,
- version_ids=version_ids,
- context_filters=context_filters,
- names_by_version_ids=names_by_version_ids,
- standard=False,
- archived=True,
- fields=fields
- )
-
-
-def get_representations_parents(project_name, representations):
- """Prepare parents of representation entities.
-
- Each item of returned dictionary contains version, subset, asset
- and project in that order.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- representations (List[dict]): Representation entities with at least
- '_id' and 'parent' keys.
-
- Returns:
- dict[ObjectId, tuple]: Parents by representation id.
- """
-
- repre_docs_by_version_id = collections.defaultdict(list)
- version_docs_by_version_id = {}
- version_docs_by_subset_id = collections.defaultdict(list)
- subset_docs_by_subset_id = {}
- subset_docs_by_asset_id = collections.defaultdict(list)
- output = {}
- for repre_doc in representations:
- repre_id = repre_doc["_id"]
- version_id = repre_doc["parent"]
- output[repre_id] = (None, None, None, None)
- repre_docs_by_version_id[version_id].append(repre_doc)
-
- version_docs = get_versions(
- project_name,
- version_ids=repre_docs_by_version_id.keys(),
- hero=True
- )
- for version_doc in version_docs:
- version_id = version_doc["_id"]
- subset_id = version_doc["parent"]
- version_docs_by_version_id[version_id] = version_doc
- version_docs_by_subset_id[subset_id].append(version_doc)
-
- subset_docs = get_subsets(
- project_name, subset_ids=version_docs_by_subset_id.keys()
- )
- for subset_doc in subset_docs:
- subset_id = subset_doc["_id"]
- asset_id = subset_doc["parent"]
- subset_docs_by_subset_id[subset_id] = subset_doc
- subset_docs_by_asset_id[asset_id].append(subset_doc)
-
- asset_docs = get_assets(
- project_name, asset_ids=subset_docs_by_asset_id.keys()
- )
- asset_docs_by_id = {
- asset_doc["_id"]: asset_doc
- for asset_doc in asset_docs
- }
-
- project_doc = get_project(project_name)
-
- for version_id, repre_docs in repre_docs_by_version_id.items():
- asset_doc = None
- subset_doc = None
- version_doc = version_docs_by_version_id.get(version_id)
- if version_doc:
- subset_id = version_doc["parent"]
- subset_doc = subset_docs_by_subset_id.get(subset_id)
- if subset_doc:
- asset_id = subset_doc["parent"]
- asset_doc = asset_docs_by_id.get(asset_id)
-
- for repre_doc in repre_docs:
- repre_id = repre_doc["_id"]
- output[repre_id] = (
- version_doc, subset_doc, asset_doc, project_doc
- )
- return output
-
-
-def get_representation_parents(project_name, representation):
- """Prepare parents of representation entity.
-
- Each item of returned dictionary contains version, subset, asset
- and project in that order.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- representation (dict): Representation entities with at least
- '_id' and 'parent' keys.
-
- Returns:
- dict[ObjectId, tuple]: Parents by representation id.
- """
-
- if not representation:
- return None
-
- repre_id = representation["_id"]
- parents_by_repre_id = get_representations_parents(
- project_name, [representation]
- )
- return parents_by_repre_id[repre_id]
-
-
-def get_thumbnail_id_from_source(project_name, src_type, src_id):
- """Receive thumbnail id from source entity.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- src_type (str): Type of source entity ('asset', 'version').
- src_id (Union[str, ObjectId]): Id of source entity.
-
- Returns:
- Union[ObjectId, None]: Thumbnail id assigned to entity. If Source
- entity does not have any thumbnail id assigned.
- """
-
- if not src_type or not src_id:
- return None
-
- query_filter = {"_id": convert_id(src_id)}
-
- conn = get_project_connection(project_name)
- src_doc = conn.find_one(query_filter, {"data.thumbnail_id"})
- if src_doc:
- return src_doc.get("data", {}).get("thumbnail_id")
- return None
-
-
-def get_thumbnails(project_name, thumbnail_ids, fields=None):
- """Receive thumbnails entity data.
-
- Thumbnail entity can be used to receive binary content of thumbnail based
- on its content and ThumbnailResolvers.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- thumbnail_ids (Iterable[Union[str, ObjectId]]): Ids of thumbnail
- entities.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- cursor: Cursor of queried documents.
- """
-
- if thumbnail_ids:
- thumbnail_ids = convert_ids(thumbnail_ids)
-
- if not thumbnail_ids:
- return []
- query_filter = {
- "type": "thumbnail",
- "_id": {"$in": thumbnail_ids}
- }
- conn = get_project_connection(project_name)
- return conn.find(query_filter, _prepare_fields(fields))
-
-
-def get_thumbnail(project_name, thumbnail_id, fields=None):
- """Receive thumbnail entity data.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- thumbnail_id (Union[str, ObjectId]): Id of thumbnail entity.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[Dict, None]: Thumbnail entity data which can be reduced to
- specified 'fields'.None is returned if thumbnail with specified
- filters was not found.
- """
-
- if not thumbnail_id:
- return None
- query_filter = {"type": "thumbnail", "_id": convert_id(thumbnail_id)}
- conn = get_project_connection(project_name)
- return conn.find_one(query_filter, _prepare_fields(fields))
-
-
-def get_workfile_info(
- project_name, asset_id, task_name, filename, fields=None
-):
- """Document with workfile information.
-
- Warning:
- Query is based on filename and context which does not meant it will
- find always right and expected result. Information have limited usage
- and is not recommended to use it as source information about workfile.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- asset_id (Union[str, ObjectId]): Id of asset entity.
- task_name (str): Task name on asset.
- fields (Optional[Iterable[str]]): Fields that should be returned. All
- fields are returned if 'None' is passed.
-
- Returns:
- Union[Dict, None]: Workfile entity data which can be reduced to
- specified 'fields'.None is returned if workfile with specified
- filters was not found.
- """
-
- if not asset_id or not task_name or not filename:
- return None
-
- query_filter = {
- "type": "workfile",
- "parent": convert_id(asset_id),
- "task_name": task_name,
- "filename": filename
- }
- conn = get_project_connection(project_name)
- return conn.find_one(query_filter, _prepare_fields(fields))
-
-
-"""
-## Custom data storage:
-- Settings - OP settings overrides and local settings
-- Logging - logs from Logger
-- Webpublisher - jobs
-- Ftrack - events
-- Maya - Shaders
- - openpype/hosts/maya/api/shader_definition_editor.py
- - openpype/hosts/maya/plugins/publish/validate_model_name.py
-
-## Global publish plugins
-- openpype/plugins/publish/extract_hierarchy_avalon.py
- Create:
- - asset
- Update:
- - asset
-
-## Lib
-- openpype/lib/avalon_context.py
- Update:
- - workfile data
-- openpype/lib/project_backpack.py
- Update:
- - project
-"""
+if not AYON_SERVER_ENABLED:
+ from .mongo.entities import *
+else:
+ from .server.entities import *
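With this change 'openpype/client/entities.py' becomes a thin facade over two interchangeable backends. A runnable analogue of the pattern, using stand-in flag and backends instead of the real modules:

from types import SimpleNamespace

AYON_SERVER_ENABLED = False  # stand-in for the real flag from 'openpype'

# Two interchangeable backends exposing the same public API.
_mongo_backend = SimpleNamespace(
    get_project=lambda name: {"backend": "mongo", "name": name})
_server_backend = SimpleNamespace(
    get_project=lambda name: {"backend": "server", "name": name})

# The facade picks a backend once at import time; callers only ever
# import 'get_project' from here and never see which backend answered.
_backend = _server_backend if AYON_SERVER_ENABLED else _mongo_backend
get_project = _backend.get_project

print(get_project("demo"))  # -> {'backend': 'mongo', 'name': 'demo'}
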
diff --git a/openpype/client/entity_links.py b/openpype/client/entity_links.py
index b74b4ce7f6..e18970de90 100644
--- a/openpype/client/entity_links.py
+++ b/openpype/client/entity_links.py
@@ -1,243 +1,6 @@
-from .mongo import get_project_connection
-from .entities import (
- get_assets,
- get_asset_by_id,
- get_version_by_id,
- get_representation_by_id,
- convert_id,
-)
+from openpype import AYON_SERVER_ENABLED
-
-def get_linked_asset_ids(project_name, asset_doc=None, asset_id=None):
- """Extract linked asset ids from asset document.
-
- One of asset document or asset id must be passed.
-
- Note:
- Asset links now works only from asset to assets.
-
- Args:
- asset_doc (dict): Asset document from DB.
-
- Returns:
- List[Union[ObjectId, str]]: Asset ids of input links.
- """
-
- output = []
- if not asset_doc and not asset_id:
- return output
-
- if not asset_doc:
- asset_doc = get_asset_by_id(
- project_name, asset_id, fields=["data.inputLinks"]
- )
-
- input_links = asset_doc["data"].get("inputLinks")
- if not input_links:
- return output
-
- for item in input_links:
- # Backwards compatibility for "_id" key which was replaced with
- # "id"
- if "_id" in item:
- link_id = item["_id"]
- else:
- link_id = item["id"]
- output.append(link_id)
- return output
-
-
-def get_linked_assets(
- project_name, asset_doc=None, asset_id=None, fields=None
-):
- """Return linked assets based on passed asset document.
-
- One of asset document or asset id must be passed.
-
- Args:
- project_name (str): Name of project where to look for queried entities.
- asset_doc (Dict[str, Any]): Asset document from database.
- asset_id (Union[ObjectId, str]): Asset id. Can be used instead of
- asset document.
- fields (Iterable[str]): Fields that should be returned. All fields are
- returned if 'None' is passed.
-
- Returns:
- List[Dict[str, Any]]: Asset documents of input links for passed
- asset doc.
- """
-
- if not asset_doc:
- if not asset_id:
- return []
- asset_doc = get_asset_by_id(
- project_name,
- asset_id,
- fields=["data.inputLinks"]
- )
- if not asset_doc:
- return []
-
- link_ids = get_linked_asset_ids(project_name, asset_doc=asset_doc)
- if not link_ids:
- return []
-
- return list(get_assets(project_name, asset_ids=link_ids, fields=fields))
-
-
-def get_linked_representation_id(
- project_name, repre_doc=None, repre_id=None, link_type=None, max_depth=None
-):
- """Returns list of linked ids of particular type (if provided).
-
- One of representation document or representation id must be passed.
- Note:
- Representation links now works only from representation through version
- back to representations.
-
- Args:
- project_name (str): Name of project where look for links.
- repre_doc (Dict[str, Any]): Representation document.
- repre_id (Union[ObjectId, str]): Representation id.
- link_type (str): Type of link (e.g. 'reference', ...).
- max_depth (int): Limit recursion level. Default: 0
-
- Returns:
- List[ObjectId] Linked representation ids.
- """
-
- if repre_doc:
- repre_id = repre_doc["_id"]
-
- if repre_id:
- repre_id = convert_id(repre_id)
-
- if not repre_id and not repre_doc:
- return []
-
- version_id = None
- if repre_doc:
- version_id = repre_doc.get("parent")
-
- if not version_id:
- repre_doc = get_representation_by_id(
- project_name, repre_id, fields=["parent"]
- )
- version_id = repre_doc["parent"]
-
- if not version_id:
- return []
-
- version_doc = get_version_by_id(
- project_name, version_id, fields=["type", "version_id"]
- )
- if version_doc["type"] == "hero_version":
- version_id = version_doc["version_id"]
-
- if max_depth is None:
- max_depth = 0
-
- match = {
- "_id": version_id,
- # Links are not stored to hero versions at this moment so filter
- # is limited to just versions
- "type": "version"
- }
-
- graph_lookup = {
- "from": project_name,
- "startWith": "$data.inputLinks.id",
- "connectFromField": "data.inputLinks.id",
- "connectToField": "_id",
- "as": "outputs_recursive",
- "depthField": "depth"
- }
- if max_depth != 0:
- # We offset by -1 since 0 basically means no recursion
- # but the recursion only happens after the initial lookup
- # for outputs.
- graph_lookup["maxDepth"] = max_depth - 1
-
- query_pipeline = [
- # Match
- {"$match": match},
- # Recursive graph lookup for inputs
- {"$graphLookup": graph_lookup}
- ]
- conn = get_project_connection(project_name)
- result = conn.aggregate(query_pipeline)
- referenced_version_ids = _process_referenced_pipeline_result(
- result, link_type
- )
- if not referenced_version_ids:
- return []
-
- ref_ids = conn.distinct(
- "_id",
- filter={
- "parent": {"$in": list(referenced_version_ids)},
- "type": "representation"
- }
- )
-
- return list(ref_ids)
-
-
-def _process_referenced_pipeline_result(result, link_type):
- """Filters result from pipeline for particular link_type.
-
- Pipeline cannot use link_type directly in a query.
-
- Returns:
- (list)
- """
-
- referenced_version_ids = set()
- correctly_linked_ids = set()
- for item in result:
- input_links = item.get("data", {}).get("inputLinks")
- if not input_links:
- continue
-
- _filter_input_links(
- input_links,
- link_type,
- correctly_linked_ids
- )
-
- # outputs_recursive in random order, sort by depth
- outputs_recursive = item.get("outputs_recursive")
- if not outputs_recursive:
- continue
-
- for output in sorted(outputs_recursive, key=lambda o: o["depth"]):
- output_links = output.get("data", {}).get("inputLinks")
- if not output_links and output["type"] != "hero_version":
- continue
-
- # Leaf
- if output["_id"] not in correctly_linked_ids:
- continue
-
- _filter_input_links(
- output_links,
- link_type,
- correctly_linked_ids
- )
-
- referenced_version_ids.add(output["_id"])
-
- return referenced_version_ids
-
-
-def _filter_input_links(input_links, link_type, correctly_linked_ids):
- if not input_links: # to handle hero versions
- return
-
- for input_link in input_links:
- if link_type and input_link["type"] != link_type:
- continue
-
- link_id = input_link.get("id") or input_link.get("_id")
- if link_id is not None:
- correctly_linked_ids.add(link_id)
+if not AYON_SERVER_ENABLED:
+ from .mongo.entity_links import *
+else:
+ from .server.entity_links import *
diff --git a/openpype/client/mongo/__init__.py b/openpype/client/mongo/__init__.py
new file mode 100644
index 0000000000..9f62d7a9cf
--- /dev/null
+++ b/openpype/client/mongo/__init__.py
@@ -0,0 +1,26 @@
+from .mongo import (
+ MongoEnvNotSet,
+ get_default_components,
+ should_add_certificate_path_to_mongo_url,
+ validate_mongo_connection,
+ OpenPypeMongoConnection,
+ get_project_database,
+ get_project_connection,
+ load_json_file,
+ replace_project_documents,
+ store_project_documents,
+)
+
+
+__all__ = (
+ "MongoEnvNotSet",
+ "get_default_components",
+ "should_add_certificate_path_to_mongo_url",
+ "validate_mongo_connection",
+ "OpenPypeMongoConnection",
+ "get_project_database",
+ "get_project_connection",
+ "load_json_file",
+ "replace_project_documents",
+ "store_project_documents",
+)
diff --git a/openpype/client/mongo/entities.py b/openpype/client/mongo/entities.py
new file mode 100644
index 0000000000..260fde4594
--- /dev/null
+++ b/openpype/client/mongo/entities.py
@@ -0,0 +1,1555 @@
+"""Unclear if these will have public functions like these.
+
+Goal is that most of functions here are called on (or with) an object
+that has project name as a context (e.g. on 'ProjectEntity'?).
+
++ We will need more specific functions doing very specific queries really fast.
+"""
+
+import re
+import collections
+
+import six
+from bson.objectid import ObjectId
+
+from .mongo import get_project_database, get_project_connection
+
+PatternType = type(re.compile(""))
+
+
+def _prepare_fields(fields, required_fields=None):
+ if not fields:
+ return None
+
+ output = {
+ field: True
+ for field in fields
+ }
+ if "_id" not in output:
+ output["_id"] = True
+
+ if required_fields:
+ for key in required_fields:
+ output[key] = True
+ return output
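+# Illustrative examples of the projection '_prepare_fields' builds (the
+# example field names are arbitrary):
+#   _prepare_fields(["name", "data.tasks"])
+#       -> {"name": True, "data.tasks": True, "_id": True}
+#   _prepare_fields(["name"], required_fields=["parent"])
+#       -> {"name": True, "_id": True, "parent": True}
+#   _prepare_fields(None)
+#       -> None  (no projection, MongoDB returns all fields)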
+
+
+def convert_id(in_id):
+ """Helper function for conversion of id from string to ObjectId.
+
+ Args:
+ in_id (Union[str, ObjectId, Any]): Entity id that should be converted
+ to right type for queries.
+
+ Returns:
+        Union[ObjectId, Any]: Id converted to ObjectId, or the input value
+            unchanged if it is not a string.
+ """
+
+ if isinstance(in_id, six.string_types):
+ return ObjectId(in_id)
+ return in_id
+
+
+def convert_ids(in_ids):
+ """Helper function for conversion of ids from string to ObjectId.
+
+ Args:
+ in_ids (Iterable[Union[str, ObjectId, Any]]): List of entity ids that
+ should be converted to right type for queries.
+
+ Returns:
+        List[ObjectId]: Ids converted to ObjectId.
+ """
+
+ _output = set()
+ for in_id in in_ids:
+ if in_id is not None:
+ _output.add(convert_id(in_id))
+ return list(_output)
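+
+# Illustrative note: duplicates and 'None' values are dropped, so string ids
+# and 'ObjectId' instances can be mixed freely:
+#     convert_ids([str_id, ObjectId(str_id), None])  # -> [ObjectId(str_id)]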
+
+
+def get_projects(active=True, inactive=False, fields=None):
+ """Yield all project entity documents.
+
+ Args:
+ active (Optional[bool]): Include active projects. Defaults to True.
+ inactive (Optional[bool]): Include inactive projects.
+ Defaults to False.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Yields:
+ dict: Project entity data which can be reduced to specified 'fields'.
+ """
+ mongodb = get_project_database()
+ for project_name in mongodb.collection_names():
+ if project_name in ("system.indexes",):
+ continue
+ project_doc = get_project(
+ project_name, active=active, inactive=inactive, fields=fields
+ )
+ if project_doc is not None:
+ yield project_doc
+
+
+def get_project(project_name, active=True, inactive=True, fields=None):
+ """Return project entity document by project name.
+
+ Args:
+ project_name (str): Name of project.
+ active (Optional[bool]): Allow active project. Defaults to True.
+ inactive (Optional[bool]): Allow inactive project. Defaults to True.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Union[Dict, None]: Project entity data which can be reduced to
+ specified 'fields'. None is returned if project with specified
+ filters was not found.
+ """
+ # Skip if both are disabled
+ if not active and not inactive:
+ return None
+
+ query_filter = {"type": "project"}
+ # Keep query untouched if both should be available
+ if active and inactive:
+ pass
+
+ # Add filter to keep only active
+ elif active:
+ query_filter["$or"] = [
+ {"data.active": {"$exists": False}},
+ {"data.active": True},
+ ]
+
+ # Add filter to keep only inactive
+ elif inactive:
+ query_filter["$or"] = [
+ {"data.active": {"$exists": False}},
+ {"data.active": False},
+ ]
+
+ conn = get_project_connection(project_name)
+ return conn.find_one(query_filter, _prepare_fields(fields))
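+
+# Example (illustrative, assumes a project named 'my_project' exists): query
+# only the active project document, reduced to a few fields:
+#     project_doc = get_project(
+#         "my_project", inactive=False, fields=["name", "config"]
+#     )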
+
+
+def get_whole_project(project_name):
+ """Receive all documents from project.
+
+    Helper that can be used to get all documents from the whole project,
+    for example for backups.
+
+ Returns:
+ Cursor: Query cursor as iterable which returns all documents from
+ project collection.
+ """
+
+ conn = get_project_connection(project_name)
+ return conn.find({})
+
+
+def get_asset_by_id(project_name, asset_id, fields=None):
+ """Receive asset data by its id.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ asset_id (Union[str, ObjectId]): Asset's id.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Union[Dict, None]: Asset entity data which can be reduced to
+ specified 'fields'. None is returned if asset with specified
+ filters was not found.
+ """
+
+ asset_id = convert_id(asset_id)
+ if not asset_id:
+ return None
+
+ query_filter = {"type": "asset", "_id": asset_id}
+ conn = get_project_connection(project_name)
+ return conn.find_one(query_filter, _prepare_fields(fields))
+
+
+def get_asset_by_name(project_name, asset_name, fields=None):
+ """Receive asset data by its name.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ asset_name (str): Asset's name.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Union[Dict, None]: Asset entity data which can be reduced to
+ specified 'fields'. None is returned if asset with specified
+ filters was not found.
+ """
+
+ if not asset_name:
+ return None
+
+ query_filter = {"type": "asset", "name": asset_name}
+ conn = get_project_connection(project_name)
+ return conn.find_one(query_filter, _prepare_fields(fields))
+
+
+# NOTE this could be just public function?
+# - any better variable name instead of 'standard'?
+# - same approach can be used for rest of types
+def _get_assets(
+ project_name,
+ asset_ids=None,
+ asset_names=None,
+ parent_ids=None,
+ standard=True,
+ archived=False,
+ fields=None
+):
+ """Assets for specified project by passed filters.
+
+ Passed filters (ids and names) are always combined so all conditions must
+ match.
+
+ To receive all assets from project just keep filters empty.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should
+ be found.
+        asset_names (Iterable[str]): Names of assets that should be found.
+ parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids.
+ standard (bool): Query standard assets (type 'asset').
+ archived (bool): Query archived assets (type 'archived_asset').
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Cursor: Query cursor as iterable which returns asset documents matching
+ passed filters.
+ """
+
+ asset_types = []
+ if standard:
+ asset_types.append("asset")
+ if archived:
+ asset_types.append("archived_asset")
+
+ if not asset_types:
+ return []
+
+ if len(asset_types) == 1:
+ query_filter = {"type": asset_types[0]}
+ else:
+ query_filter = {"type": {"$in": asset_types}}
+
+ if asset_ids is not None:
+ asset_ids = convert_ids(asset_ids)
+ if not asset_ids:
+ return []
+ query_filter["_id"] = {"$in": asset_ids}
+
+ if asset_names is not None:
+ if not asset_names:
+ return []
+ query_filter["name"] = {"$in": list(asset_names)}
+
+ if parent_ids is not None:
+ parent_ids = convert_ids(parent_ids)
+ if not parent_ids:
+ return []
+ query_filter["data.visualParent"] = {"$in": parent_ids}
+
+ conn = get_project_connection(project_name)
+
+ return conn.find(query_filter, _prepare_fields(fields))
+
+
+def get_assets(
+ project_name,
+ asset_ids=None,
+ asset_names=None,
+ parent_ids=None,
+ archived=False,
+ fields=None
+):
+ """Assets for specified project by passed filters.
+
+ Passed filters (ids and names) are always combined so all conditions must
+ match.
+
+ To receive all assets from project just keep filters empty.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should
+ be found.
+        asset_names (Iterable[str]): Names of assets that should be found.
+ parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids.
+ archived (bool): Add also archived assets.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Cursor: Query cursor as iterable which returns asset documents matching
+ passed filters.
+ """
+
+ return _get_assets(
+ project_name,
+ asset_ids,
+ asset_names,
+ parent_ids,
+ True,
+ archived,
+ fields
+ )
+
+
+def get_archived_assets(
+ project_name,
+ asset_ids=None,
+ asset_names=None,
+ parent_ids=None,
+ fields=None
+):
+ """Archived assets for specified project by passed filters.
+
+ Passed filters (ids and names) are always combined so all conditions must
+ match.
+
+ To receive all archived assets from project just keep filters empty.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ asset_ids (Iterable[Union[str, ObjectId]]): Asset ids that should
+ be found.
+        asset_names (Iterable[str]): Names of assets that should be found.
+ parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Cursor: Query cursor as iterable which returns asset documents matching
+ passed filters.
+ """
+
+ return _get_assets(
+ project_name, asset_ids, asset_names, parent_ids, False, True, fields
+ )
+
+
+def get_asset_ids_with_subsets(project_name, asset_ids=None):
+ """Find out which assets have existing subsets.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ asset_ids (Iterable[Union[str, ObjectId]]): Look only for entered
+ asset ids.
+
+ Returns:
+ Iterable[ObjectId]: Asset ids that have existing subsets.
+ """
+
+ subset_query = {
+ "type": "subset"
+ }
+ if asset_ids is not None:
+ asset_ids = convert_ids(asset_ids)
+ if not asset_ids:
+ return []
+ subset_query["parent"] = {"$in": asset_ids}
+
+ conn = get_project_connection(project_name)
+ result = conn.aggregate([
+ {
+ "$match": subset_query
+ },
+ {
+ "$group": {
+ "_id": "$parent",
+ "count": {"$sum": 1}
+ }
+ }
+ ])
+ asset_ids_with_subsets = []
+ for item in result:
+ asset_id = item["_id"]
+ count = item["count"]
+ if count > 0:
+ asset_ids_with_subsets.append(asset_id)
+ return asset_ids_with_subsets
+
+
+def get_subset_by_id(project_name, subset_id, fields=None):
+ """Single subset entity data by its id.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ subset_id (Union[str, ObjectId]): Id of subset which should be found.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Union[Dict, None]: Subset entity data which can be reduced to
+ specified 'fields'. None is returned if subset with specified
+ filters was not found.
+ """
+
+ subset_id = convert_id(subset_id)
+ if not subset_id:
+ return None
+
+ query_filters = {"type": "subset", "_id": subset_id}
+ conn = get_project_connection(project_name)
+ return conn.find_one(query_filters, _prepare_fields(fields))
+
+
+def get_subset_by_name(project_name, subset_name, asset_id, fields=None):
+ """Single subset entity data by its name and its version id.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ subset_name (str): Name of subset.
+ asset_id (Union[str, ObjectId]): Id of parent asset.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Union[Dict, None]: Subset entity data which can be reduced to
+ specified 'fields'. None is returned if subset with specified
+ filters was not found.
+ """
+ if not subset_name:
+ return None
+
+ asset_id = convert_id(asset_id)
+ if not asset_id:
+ return None
+
+ query_filters = {
+ "type": "subset",
+ "name": subset_name,
+ "parent": asset_id
+ }
+ conn = get_project_connection(project_name)
+ return conn.find_one(query_filters, _prepare_fields(fields))
+
+
+def get_subsets(
+ project_name,
+ subset_ids=None,
+ subset_names=None,
+ asset_ids=None,
+ names_by_asset_ids=None,
+ archived=False,
+ fields=None
+):
+ """Subset entities data from one project filtered by entered filters.
+
+ Filters are additive (all conditions must pass to return subset).
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ subset_ids (Iterable[Union[str, ObjectId]]): Subset ids that should be
+ queried. Filter ignored if 'None' is passed.
+ subset_names (Iterable[str]): Subset names that should be queried.
+ Filter ignored if 'None' is passed.
+ asset_ids (Iterable[Union[str, ObjectId]]): Asset ids under which
+ should look for the subsets. Filter ignored if 'None' is passed.
+ names_by_asset_ids (dict[ObjectId, List[str]]): Complex filtering
+ using asset ids and list of subset names under the asset.
+ archived (bool): Look for archived subsets too.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Cursor: Iterable cursor yielding all matching subsets.
+ """
+
+ subset_types = ["subset"]
+ if archived:
+ subset_types.append("archived_subset")
+
+ if len(subset_types) == 1:
+ query_filter = {"type": subset_types[0]}
+ else:
+ query_filter = {"type": {"$in": subset_types}}
+
+ if asset_ids is not None:
+ asset_ids = convert_ids(asset_ids)
+ if not asset_ids:
+ return []
+ query_filter["parent"] = {"$in": asset_ids}
+
+ if subset_ids is not None:
+ subset_ids = convert_ids(subset_ids)
+ if not subset_ids:
+ return []
+ query_filter["_id"] = {"$in": subset_ids}
+
+ if subset_names is not None:
+ if not subset_names:
+ return []
+ query_filter["name"] = {"$in": list(subset_names)}
+
+ if names_by_asset_ids is not None:
+ or_query = []
+ for asset_id, names in names_by_asset_ids.items():
+ if asset_id and names:
+ or_query.append({
+ "parent": convert_id(asset_id),
+ "name": {"$in": list(names)}
+ })
+ if not or_query:
+ return []
+ query_filter["$or"] = or_query
+
+ conn = get_project_connection(project_name)
+ return conn.find(query_filter, _prepare_fields(fields))
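+
+# Example (illustrative, assumes 'asset_a_id' and 'asset_b_id' are valid
+# asset ids): combine per-asset subset name filters in a single query:
+#     subset_docs = get_subsets(
+#         "my_project",
+#         names_by_asset_ids={
+#             asset_a_id: ["modelMain"],
+#             asset_b_id: ["modelMain", "rigMain"],
+#         },
+#         fields=["name", "parent"]
+#     )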
+
+
+def get_subset_families(project_name, subset_ids=None):
+ """Set of main families of subsets.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ subset_ids (Iterable[Union[str, ObjectId]]): Subset ids that should
+ be queried. All subsets from project are used if 'None' is passed.
+
+ Returns:
+ set[str]: Main families of matching subsets.
+ """
+
+ subset_filter = {
+ "type": "subset"
+ }
+    if subset_ids is not None:
+        subset_ids = convert_ids(subset_ids)
+        if not subset_ids:
+            return set()
+        subset_filter["_id"] = {"$in": subset_ids}
+
+ conn = get_project_connection(project_name)
+ result = list(conn.aggregate([
+ {"$match": subset_filter},
+ {"$project": {
+ "family": {"$arrayElemAt": ["$data.families", 0]}
+ }},
+ {"$group": {
+ "_id": "family_group",
+ "families": {"$addToSet": "$family"}
+ }}
+ ]))
+ if result:
+ return set(result[0]["families"])
+ return set()
+
+
+def get_version_by_id(project_name, version_id, fields=None):
+ """Single version entity data by its id.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ version_id (Union[str, ObjectId]): Id of version which should be found.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Union[Dict, None]: Version entity data which can be reduced to
+ specified 'fields'. None is returned if version with specified
+ filters was not found.
+ """
+
+ version_id = convert_id(version_id)
+ if not version_id:
+ return None
+
+ query_filter = {
+ "type": {"$in": ["version", "hero_version"]},
+ "_id": version_id
+ }
+ conn = get_project_connection(project_name)
+ return conn.find_one(query_filter, _prepare_fields(fields))
+
+
+def get_version_by_name(project_name, version, subset_id, fields=None):
+ """Single version entity data by its name and subset id.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+        version (int): Version number (the name of the version entity).
+        subset_id (Union[str, ObjectId]): Id of subset under which the
+            version should be found.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Union[Dict, None]: Version entity data which can be reduced to
+ specified 'fields'. None is returned if version with specified
+ filters was not found.
+ """
+
+ subset_id = convert_id(subset_id)
+ if not subset_id:
+ return None
+
+ conn = get_project_connection(project_name)
+ query_filter = {
+ "type": "version",
+ "parent": subset_id,
+ "name": version
+ }
+ return conn.find_one(query_filter, _prepare_fields(fields))
+
+
+def version_is_latest(project_name, version_id):
+ """Is version the latest from its subset.
+
+ Note:
+        Hero versions are considered latest.
+
+ Todo:
+ Maybe raise exception when version was not found?
+
+ Args:
+        project_name (str): Name of project where to look for queried entities.
+ version_id (Union[str, ObjectId]): Version id which is checked.
+
+ Returns:
+ bool: True if is latest version from subset else False.
+ """
+
+ version_id = convert_id(version_id)
+ if not version_id:
+ return False
+ version_doc = get_version_by_id(
+ project_name, version_id, fields=["_id", "type", "parent"]
+ )
+ # What to do when version is not found?
+ if not version_doc:
+ return False
+
+ if version_doc["type"] == "hero_version":
+ return True
+
+ last_version = get_last_version_by_subset_id(
+ project_name, version_doc["parent"], fields=["_id"]
+ )
+ return last_version["_id"] == version_id
+
+
+def _get_versions(
+ project_name,
+ subset_ids=None,
+ version_ids=None,
+ versions=None,
+ standard=True,
+ hero=False,
+ fields=None
+):
+ version_types = []
+ if standard:
+ version_types.append("version")
+
+ if hero:
+ version_types.append("hero_version")
+
+ if not version_types:
+ return []
+ elif len(version_types) == 1:
+ query_filter = {"type": version_types[0]}
+ else:
+ query_filter = {"type": {"$in": version_types}}
+
+ if subset_ids is not None:
+ subset_ids = convert_ids(subset_ids)
+ if not subset_ids:
+ return []
+ query_filter["parent"] = {"$in": subset_ids}
+
+ if version_ids is not None:
+ version_ids = convert_ids(version_ids)
+ if not version_ids:
+ return []
+ query_filter["_id"] = {"$in": version_ids}
+
+ if versions is not None:
+ versions = list(versions)
+ if not versions:
+ return []
+
+ if len(versions) == 1:
+ query_filter["name"] = versions[0]
+ else:
+ query_filter["name"] = {"$in": versions}
+
+ conn = get_project_connection(project_name)
+
+ return conn.find(query_filter, _prepare_fields(fields))
+
+
+def get_versions(
+ project_name,
+ version_ids=None,
+ subset_ids=None,
+ versions=None,
+ hero=False,
+ fields=None
+):
+ """Version entities data from one project filtered by entered filters.
+
+    Filters are additive (all conditions must pass to return version).
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ version_ids (Iterable[Union[str, ObjectId]]): Version ids that will
+ be queried. Filter ignored if 'None' is passed.
+ subset_ids (Iterable[str]): Subset ids that will be queried.
+ Filter ignored if 'None' is passed.
+ versions (Iterable[int]): Version names (as integers).
+ Filter ignored if 'None' is passed.
+ hero (bool): Look also for hero versions.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Cursor: Iterable cursor yielding all matching versions.
+ """
+
+ return _get_versions(
+ project_name,
+ subset_ids,
+ version_ids,
+ versions,
+ standard=True,
+ hero=hero,
+ fields=fields
+ )
+
+
+def get_hero_version_by_subset_id(project_name, subset_id, fields=None):
+ """Hero version by subset id.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+        subset_id (Union[str, ObjectId]): Subset id under which the
+            hero version lives.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Union[Dict, None]: Hero version entity data which can be reduced to
+ specified 'fields'. None is returned if hero version with specified
+ filters was not found.
+ """
+
+ subset_id = convert_id(subset_id)
+ if not subset_id:
+ return None
+
+ versions = list(_get_versions(
+ project_name,
+ subset_ids=[subset_id],
+ standard=False,
+ hero=True,
+ fields=fields
+ ))
+ if versions:
+ return versions[0]
+ return None
+
+
+def get_hero_version_by_id(project_name, version_id, fields=None):
+ """Hero version by its id.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ version_id (Union[str, ObjectId]): Hero version id.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Union[Dict, None]: Hero version entity data which can be reduced to
+ specified 'fields'. None is returned if hero version with specified
+ filters was not found.
+ """
+
+ version_id = convert_id(version_id)
+ if not version_id:
+ return None
+
+ versions = list(_get_versions(
+ project_name,
+ version_ids=[version_id],
+ standard=False,
+ hero=True,
+ fields=fields
+ ))
+ if versions:
+ return versions[0]
+ return None
+
+
+def get_hero_versions(
+ project_name,
+ subset_ids=None,
+ version_ids=None,
+ fields=None
+):
+ """Hero version entities data from one project filtered by entered filters.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ subset_ids (Iterable[Union[str, ObjectId]]): Subset ids for which
+            to look for hero versions. Filter ignored if 'None' is passed.
+ version_ids (Iterable[Union[str, ObjectId]]): Hero version ids. Filter
+ ignored if 'None' is passed.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Cursor|list: Iterable yielding hero versions matching passed filters.
+ """
+
+ return _get_versions(
+ project_name,
+ subset_ids,
+ version_ids,
+ standard=False,
+ hero=True,
+ fields=fields
+ )
+
+
+def get_output_link_versions(project_name, version_id, fields=None):
+ """Versions where passed version was used as input.
+
+ Question:
+        Not 100% sure about the usage of the function, so the name and
+        docstring may not match what it does.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ version_id (Union[str, ObjectId]): Version id which can be used
+ as input link for other versions.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Iterable: Iterable cursor yielding versions that are used as input
+ links for passed version.
+ """
+
+ version_id = convert_id(version_id)
+ if not version_id:
+ return []
+
+ conn = get_project_connection(project_name)
+    # Does it make sense to look for hero versions?
+ query_filter = {
+ "type": "version",
+ "data.inputLinks.id": version_id
+ }
+ return conn.find(query_filter, _prepare_fields(fields))
+
+
+def get_last_versions(project_name, subset_ids, active=None, fields=None):
+ """Latest versions for entered subset_ids.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ subset_ids (Iterable[Union[str, ObjectId]]): List of subset ids.
+ active (Optional[bool]): If True only active versions are returned.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+        dict[ObjectId, Dict]: Key is subset id and value is the last
+            version document of that subset.
+ """
+
+ subset_ids = convert_ids(subset_ids)
+ if not subset_ids:
+ return {}
+
+ if fields is not None:
+ fields = list(fields)
+ if not fields:
+ return {}
+
+    # Avoid a second query if only 'name', '_id' and 'parent' are requested
+ name_needed = False
+ limit_query = False
+ if fields:
+ fields_s = set(fields)
+ if "name" in fields_s:
+ name_needed = True
+ fields_s.remove("name")
+
+ for field in ("_id", "parent"):
+ if field in fields_s:
+ fields_s.remove(field)
+ limit_query = len(fields_s) == 0
+
+ group_item = {
+ "_id": "$parent",
+ "_version_id": {"$last": "$_id"}
+ }
+ # Add name if name is needed (only for limit query)
+ if name_needed:
+ group_item["name"] = {"$last": "$name"}
+
+ aggregate_filter = {
+ "type": "version",
+ "parent": {"$in": subset_ids}
+ }
+ if active is False:
+ aggregate_filter["data.active"] = active
+ elif active is True:
+ aggregate_filter["$or"] = [
+ {"data.active": {"$exists": 0}},
+ {"data.active": active},
+ ]
+
+ aggregation_pipeline = [
+ # Find all versions of those subsets
+ {"$match": aggregate_filter},
+ # Sorting versions all together
+ {"$sort": {"name": 1}},
+ # Group them by "parent", but only take the last
+ {"$group": group_item}
+ ]
+
+ conn = get_project_connection(project_name)
+ aggregate_result = conn.aggregate(aggregation_pipeline)
+ if limit_query:
+ output = {}
+ for item in aggregate_result:
+ subset_id = item["_id"]
+ item_data = {"_id": item["_version_id"], "parent": subset_id}
+ if name_needed:
+ item_data["name"] = item["name"]
+ output[subset_id] = item_data
+ return output
+
+ version_ids = [
+ doc["_version_id"]
+ for doc in aggregate_result
+ ]
+
+ fields = _prepare_fields(fields, ["parent"])
+
+ version_docs = get_versions(
+ project_name, version_ids=version_ids, fields=fields
+ )
+
+ return {
+ version_doc["parent"]: version_doc
+ for version_doc in version_docs
+ }
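+
+# Example (illustrative): requesting only '_id', 'parent' and 'name' is
+# served directly from the aggregation, skipping the second versions query:
+#     last_versions_by_subset_id = get_last_versions(
+#         "my_project", subset_ids, fields=["_id", "parent", "name"]
+#     )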
+
+
+def get_last_version_by_subset_id(project_name, subset_id, fields=None):
+ """Last version for passed subset id.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+        subset_id (Union[str, ObjectId]): Id of subset for which the last
+            version should be found.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Union[Dict, None]: Version entity data which can be reduced to
+ specified 'fields'. None is returned if version with specified
+ filters was not found.
+ """
+
+ subset_id = convert_id(subset_id)
+ if not subset_id:
+ return None
+
+ last_versions = get_last_versions(
+ project_name, subset_ids=[subset_id], fields=fields
+ )
+ return last_versions.get(subset_id)
+
+
+def get_last_version_by_subset_name(
+ project_name, subset_name, asset_id=None, asset_name=None, fields=None
+):
+ """Last version for passed subset name under asset id/name.
+
+ It is required to pass 'asset_id' or 'asset_name'. Asset id is recommended
+    if it is available.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ subset_name (str): Name of subset.
+ asset_id (Union[str, ObjectId]): Asset id which is parent of passed
+ subset name.
+ asset_name (str): Asset name which is parent of passed subset name.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Union[Dict, None]: Version entity data which can be reduced to
+ specified 'fields'. None is returned if version with specified
+ filters was not found.
+ """
+
+ if not asset_id and not asset_name:
+ return None
+
+ if not asset_id:
+ asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"])
+ if not asset_doc:
+ return None
+ asset_id = asset_doc["_id"]
+ subset_doc = get_subset_by_name(
+ project_name, subset_name, asset_id, fields=["_id"]
+ )
+ if not subset_doc:
+ return None
+ return get_last_version_by_subset_id(
+ project_name, subset_doc["_id"], fields=fields
+ )
+
+
+def get_representation_by_id(project_name, representation_id, fields=None):
+ """Representation entity data by its id.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ representation_id (Union[str, ObjectId]): Representation id.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Union[Dict, None]: Representation entity data which can be reduced to
+ specified 'fields'. None is returned if representation with
+ specified filters was not found.
+ """
+
+ if not representation_id:
+ return None
+
+ repre_types = ["representation", "archived_representation"]
+ query_filter = {
+ "type": {"$in": repre_types}
+ }
+ if representation_id is not None:
+ query_filter["_id"] = convert_id(representation_id)
+
+ conn = get_project_connection(project_name)
+
+ return conn.find_one(query_filter, _prepare_fields(fields))
+
+
+def get_representation_by_name(
+ project_name, representation_name, version_id, fields=None
+):
+ """Representation entity data by its name and its version id.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ representation_name (str): Representation name.
+ version_id (Union[str, ObjectId]): Id of parent version entity.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Union[dict[str, Any], None]: Representation entity data which can be
+ reduced to specified 'fields'. None is returned if representation
+ with specified filters was not found.
+ """
+
+ version_id = convert_id(version_id)
+ if not version_id or not representation_name:
+ return None
+ repre_types = ["representation", "archived_representations"]
+ query_filter = {
+ "type": {"$in": repre_types},
+ "name": representation_name,
+ "parent": version_id
+ }
+
+ conn = get_project_connection(project_name)
+ return conn.find_one(query_filter, _prepare_fields(fields))
+
+
+def _flatten_dict(data):
+ flatten_queue = collections.deque()
+ flatten_queue.append(data)
+ output = {}
+ while flatten_queue:
+ item = flatten_queue.popleft()
+ for key, value in item.items():
+ if not isinstance(value, dict):
+ output[key] = value
+ continue
+
+ tmp = {}
+ for subkey, subvalue in value.items():
+ new_key = "{}.{}".format(key, subkey)
+ tmp[new_key] = subvalue
+ flatten_queue.append(tmp)
+ return output
+
+
+def _regex_filters(filters):
+ output = []
+ for key, value in filters.items():
+ regexes = []
+ a_values = []
+ if isinstance(value, PatternType):
+ regexes.append(value)
+ elif isinstance(value, (list, tuple, set)):
+ for item in value:
+ if isinstance(item, PatternType):
+ regexes.append(item)
+ else:
+ a_values.append(item)
+ else:
+ a_values.append(value)
+
+ key_filters = []
+ if len(a_values) == 1:
+ key_filters.append({key: a_values[0]})
+ elif a_values:
+ key_filters.append({key: {"$in": a_values}})
+
+ for regex in regexes:
+ key_filters.append({key: {"$regex": regex}})
+
+ if len(key_filters) == 1:
+ output.append(key_filters[0])
+ else:
+ output.append({"$or": key_filters})
+
+ return output
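+
+# Illustrative note: plain values and compiled patterns can be mixed per key:
+#     _regex_filters({"context.ext": ["abc", re.compile("^ma")]})
+# returns
+#     [{"$or": [{"context.ext": "abc"},
+#               {"context.ext": {"$regex": re.compile("^ma")}}]}]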
+
+
+def _get_representations(
+ project_name,
+ representation_ids,
+ representation_names,
+ version_ids,
+ context_filters,
+ names_by_version_ids,
+ standard,
+ archived,
+ fields
+):
+ default_output = []
+ repre_types = []
+ if standard:
+ repre_types.append("representation")
+ if archived:
+ repre_types.append("archived_representation")
+
+ if not repre_types:
+ return default_output
+
+ if len(repre_types) == 1:
+ query_filter = {"type": repre_types[0]}
+ else:
+ query_filter = {"type": {"$in": repre_types}}
+
+ if representation_ids is not None:
+ representation_ids = convert_ids(representation_ids)
+ if not representation_ids:
+ return default_output
+ query_filter["_id"] = {"$in": representation_ids}
+
+ if representation_names is not None:
+ if not representation_names:
+ return default_output
+ query_filter["name"] = {"$in": list(representation_names)}
+
+ if version_ids is not None:
+ version_ids = convert_ids(version_ids)
+ if not version_ids:
+ return default_output
+ query_filter["parent"] = {"$in": version_ids}
+
+ or_queries = []
+ if names_by_version_ids is not None:
+ or_query = []
+ for version_id, names in names_by_version_ids.items():
+ if version_id and names:
+ or_query.append({
+ "parent": convert_id(version_id),
+ "name": {"$in": list(names)}
+ })
+ if not or_query:
+ return default_output
+ or_queries.append(or_query)
+
+ if context_filters is not None:
+ if not context_filters:
+ return []
+ _flatten_filters = _flatten_dict(context_filters)
+ flatten_filters = {}
+ for key, value in _flatten_filters.items():
+ if not key.startswith("context"):
+ key = "context.{}".format(key)
+ flatten_filters[key] = value
+
+ for item in _regex_filters(flatten_filters):
+ for key, value in item.items():
+ if key != "$or":
+ query_filter[key] = value
+
+ elif value:
+ or_queries.append(value)
+
+ if len(or_queries) == 1:
+ query_filter["$or"] = or_queries[0]
+ elif or_queries:
+ and_query = []
+ for or_query in or_queries:
+ if isinstance(or_query, list):
+ or_query = {"$or": or_query}
+ and_query.append(or_query)
+ query_filter["$and"] = and_query
+
+ conn = get_project_connection(project_name)
+
+ return conn.find(query_filter, _prepare_fields(fields))
+
+
+def get_representations(
+ project_name,
+ representation_ids=None,
+ representation_names=None,
+ version_ids=None,
+ context_filters=None,
+ names_by_version_ids=None,
+ archived=False,
+ standard=True,
+ fields=None
+):
+ """Representation entities data from one project filtered by filters.
+
+    Filters are additive (all conditions must pass to return representation).
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ representation_ids (Iterable[Union[str, ObjectId]]): Representation ids
+ used as filter. Filter ignored if 'None' is passed.
+ representation_names (Iterable[str]): Representations names used
+ as filter. Filter ignored if 'None' is passed.
+        version_ids (Iterable[str]): Version ids used as parent filter. Filter
+ ignored if 'None' is passed.
+ context_filters (Dict[str, List[str, PatternType]]): Filter by
+ representation context fields.
+ names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
+ using version ids and list of names under the version.
+ archived (bool): Output will also contain archived representations.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Cursor: Iterable cursor yielding all matching representations.
+ """
+
+ return _get_representations(
+ project_name=project_name,
+ representation_ids=representation_ids,
+ representation_names=representation_names,
+ version_ids=version_ids,
+ context_filters=context_filters,
+ names_by_version_ids=names_by_version_ids,
+ standard=standard,
+ archived=archived,
+ fields=fields
+ )
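+
+# Example (illustrative): 'exr' representations whose context subset starts
+# with 'render', using a compiled pattern as a context filter:
+#     repre_docs = get_representations(
+#         "my_project",
+#         representation_names=["exr"],
+#         context_filters={"subset": [re.compile("^render")]}
+#     )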
+
+
+def get_archived_representations(
+ project_name,
+ representation_ids=None,
+ representation_names=None,
+ version_ids=None,
+ context_filters=None,
+ names_by_version_ids=None,
+ fields=None
+):
+ """Archived representation entities data from project with applied filters.
+
+    Filters are additive (all conditions must pass to return representation).
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ representation_ids (Iterable[Union[str, ObjectId]]): Representation ids
+ used as filter. Filter ignored if 'None' is passed.
+ representation_names (Iterable[str]): Representations names used
+ as filter. Filter ignored if 'None' is passed.
+        version_ids (Iterable[str]): Version ids used as parent filter. Filter
+ ignored if 'None' is passed.
+ context_filters (Dict[str, List[str, PatternType]]): Filter by
+ representation context fields.
+ names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering
+ using version ids and list of names under the version.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Cursor: Iterable cursor yielding all matching representations.
+ """
+
+ return _get_representations(
+ project_name=project_name,
+ representation_ids=representation_ids,
+ representation_names=representation_names,
+ version_ids=version_ids,
+ context_filters=context_filters,
+ names_by_version_ids=names_by_version_ids,
+ standard=False,
+ archived=True,
+ fields=fields
+ )
+
+
+def get_representations_parents(project_name, representations):
+ """Prepare parents of representation entities.
+
+ Each item of returned dictionary contains version, subset, asset
+ and project in that order.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ representations (List[dict]): Representation entities with at least
+ '_id' and 'parent' keys.
+
+ Returns:
+ dict[ObjectId, tuple]: Parents by representation id.
+ """
+
+ repre_docs_by_version_id = collections.defaultdict(list)
+ version_docs_by_version_id = {}
+ version_docs_by_subset_id = collections.defaultdict(list)
+ subset_docs_by_subset_id = {}
+ subset_docs_by_asset_id = collections.defaultdict(list)
+ output = {}
+ for repre_doc in representations:
+ repre_id = repre_doc["_id"]
+ version_id = repre_doc["parent"]
+ output[repre_id] = (None, None, None, None)
+ repre_docs_by_version_id[version_id].append(repre_doc)
+
+ version_docs = get_versions(
+ project_name,
+ version_ids=repre_docs_by_version_id.keys(),
+ hero=True
+ )
+ for version_doc in version_docs:
+ version_id = version_doc["_id"]
+ subset_id = version_doc["parent"]
+ version_docs_by_version_id[version_id] = version_doc
+ version_docs_by_subset_id[subset_id].append(version_doc)
+
+ subset_docs = get_subsets(
+ project_name, subset_ids=version_docs_by_subset_id.keys()
+ )
+ for subset_doc in subset_docs:
+ subset_id = subset_doc["_id"]
+ asset_id = subset_doc["parent"]
+ subset_docs_by_subset_id[subset_id] = subset_doc
+ subset_docs_by_asset_id[asset_id].append(subset_doc)
+
+ asset_docs = get_assets(
+ project_name, asset_ids=subset_docs_by_asset_id.keys()
+ )
+ asset_docs_by_id = {
+ asset_doc["_id"]: asset_doc
+ for asset_doc in asset_docs
+ }
+
+ project_doc = get_project(project_name)
+
+ for version_id, repre_docs in repre_docs_by_version_id.items():
+ asset_doc = None
+ subset_doc = None
+ version_doc = version_docs_by_version_id.get(version_id)
+ if version_doc:
+ subset_id = version_doc["parent"]
+ subset_doc = subset_docs_by_subset_id.get(subset_id)
+ if subset_doc:
+ asset_id = subset_doc["parent"]
+ asset_doc = asset_docs_by_id.get(asset_id)
+
+ for repre_doc in repre_docs:
+ repre_id = repre_doc["_id"]
+ output[repre_id] = (
+ version_doc, subset_doc, asset_doc, project_doc
+ )
+ return output
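+
+# Example (illustrative): resolve the full parent chain of queried
+# representations in a single pass:
+#     parents_by_repre_id = get_representations_parents(
+#         "my_project", repre_docs
+#     )
+#     version_doc, subset_doc, asset_doc, project_doc = (
+#         parents_by_repre_id[repre_id]
+#     )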
+
+
+def get_representation_parents(project_name, representation):
+ """Prepare parents of representation entity.
+
+    The returned tuple contains version, subset, asset
+    and project in that order.
+
+    Args:
+        project_name (str): Name of project where to look for queried entities.
+        representation (dict): Representation entity with at least
+            '_id' and 'parent' keys.
+
+    Returns:
+        Union[tuple, None]: Version, subset, asset and project documents of
+            the representation. None if no representation was passed.
+ """
+
+ if not representation:
+ return None
+
+ repre_id = representation["_id"]
+ parents_by_repre_id = get_representations_parents(
+ project_name, [representation]
+ )
+ return parents_by_repre_id[repre_id]
+
+
+def get_thumbnail_id_from_source(project_name, src_type, src_id):
+ """Receive thumbnail id from source entity.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ src_type (str): Type of source entity ('asset', 'version').
+ src_id (Union[str, ObjectId]): Id of source entity.
+
+ Returns:
+        Union[ObjectId, None]: Thumbnail id assigned to entity, or None if
+            the source entity does not have any thumbnail id assigned.
+ """
+
+ if not src_type or not src_id:
+ return None
+
+ query_filter = {"_id": convert_id(src_id)}
+
+ conn = get_project_connection(project_name)
+ src_doc = conn.find_one(query_filter, {"data.thumbnail_id"})
+ if src_doc:
+ return src_doc.get("data", {}).get("thumbnail_id")
+ return None
+
+
+def get_thumbnails(project_name, thumbnail_ids, fields=None):
+ """Receive thumbnails entity data.
+
+ Thumbnail entity can be used to receive binary content of thumbnail based
+ on its content and ThumbnailResolvers.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ thumbnail_ids (Iterable[Union[str, ObjectId]]): Ids of thumbnail
+ entities.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+        Cursor: Cursor of queried documents.
+ """
+
+ if thumbnail_ids:
+ thumbnail_ids = convert_ids(thumbnail_ids)
+
+ if not thumbnail_ids:
+ return []
+ query_filter = {
+ "type": "thumbnail",
+ "_id": {"$in": thumbnail_ids}
+ }
+ conn = get_project_connection(project_name)
+ return conn.find(query_filter, _prepare_fields(fields))
+
+
+def get_thumbnail(
+ project_name, thumbnail_id, entity_type, entity_id, fields=None
+):
+ """Receive thumbnail entity data.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+        thumbnail_id (Union[str, ObjectId]): Id of thumbnail entity.
+        entity_type (str): Type of source entity ('asset', 'version').
+        entity_id (Union[str, ObjectId]): Id of source entity.
+        fields (Optional[Iterable[str]]): Fields that should be returned. All
+            fields are returned if 'None' is passed.
+
+    Returns:
+        Union[Dict, None]: Thumbnail entity data which can be reduced to
+            specified 'fields'. None is returned if thumbnail with specified
+ filters was not found.
+ """
+
+ if not thumbnail_id:
+ return None
+ query_filter = {"type": "thumbnail", "_id": convert_id(thumbnail_id)}
+ conn = get_project_connection(project_name)
+ return conn.find_one(query_filter, _prepare_fields(fields))
+
+
+def get_workfile_info(
+ project_name, asset_id, task_name, filename, fields=None
+):
+ """Document with workfile information.
+
+ Warning:
+        Query is based on filename and context, which does not mean it will
+        always find the right and expected result. The information has limited
+        usage and is not recommended as a source of truth about the workfile.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ asset_id (Union[str, ObjectId]): Id of asset entity.
+        task_name (str): Task name on asset.
+        filename (str): Workfile filename.
+ fields (Optional[Iterable[str]]): Fields that should be returned. All
+ fields are returned if 'None' is passed.
+
+ Returns:
+ Union[Dict, None]: Workfile entity data which can be reduced to
+            specified 'fields'. None is returned if workfile with specified
+ filters was not found.
+ """
+
+ if not asset_id or not task_name or not filename:
+ return None
+
+ query_filter = {
+ "type": "workfile",
+ "parent": convert_id(asset_id),
+ "task_name": task_name,
+ "filename": filename
+ }
+ conn = get_project_connection(project_name)
+ return conn.find_one(query_filter, _prepare_fields(fields))
+
+
+"""
+## Custom data storage:
+- Settings - OP settings overrides and local settings
+- Logging - logs from Logger
+- Webpublisher - jobs
+- Ftrack - events
+- Maya - Shaders
+ - openpype/hosts/maya/api/shader_definition_editor.py
+ - openpype/hosts/maya/plugins/publish/validate_model_name.py
+
+## Global publish plugins
+- openpype/plugins/publish/extract_hierarchy_avalon.py
+ Create:
+ - asset
+ Update:
+ - asset
+
+## Lib
+- openpype/lib/avalon_context.py
+ Update:
+ - workfile data
+- openpype/lib/project_backpack.py
+ Update:
+ - project
+"""
diff --git a/openpype/client/mongo/entity_links.py b/openpype/client/mongo/entity_links.py
new file mode 100644
index 0000000000..fd13a2d83b
--- /dev/null
+++ b/openpype/client/mongo/entity_links.py
@@ -0,0 +1,240 @@
+from .mongo import get_project_connection
+from .entities import (
+ get_assets,
+ get_asset_by_id,
+ get_version_by_id,
+ get_representation_by_id,
+ convert_id,
+)
+
+
+def get_linked_asset_ids(project_name, asset_doc=None, asset_id=None):
+ """Extract linked asset ids from asset document.
+
+ One of asset document or asset id must be passed.
+
+ Note:
+        Asset links currently work only from asset to assets.
+
+    Args:
+        project_name (str): Name of project where to look for queried entities.
+        asset_doc (dict): Asset document from DB.
+        asset_id (Union[ObjectId, str]): Asset id. Can be used instead of
+            asset document.
+
+ Returns:
+ List[Union[ObjectId, str]]: Asset ids of input links.
+ """
+
+ output = []
+ if not asset_doc and not asset_id:
+ return output
+
+ if not asset_doc:
+ asset_doc = get_asset_by_id(
+ project_name, asset_id, fields=["data.inputLinks"]
+ )
+
+    if not asset_doc:
+        return output
+
+    input_links = asset_doc["data"].get("inputLinks")
+ if not input_links:
+ return output
+
+ for item in input_links:
+ # Backwards compatibility for "_id" key which was replaced with
+ # "id"
+ if "_id" in item:
+ link_id = item["_id"]
+ else:
+ link_id = item["id"]
+ output.append(link_id)
+ return output
+
+
+def get_linked_assets(
+ project_name, asset_doc=None, asset_id=None, fields=None
+):
+ """Return linked assets based on passed asset document.
+
+ One of asset document or asset id must be passed.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ asset_doc (Dict[str, Any]): Asset document from database.
+ asset_id (Union[ObjectId, str]): Asset id. Can be used instead of
+ asset document.
+ fields (Iterable[str]): Fields that should be returned. All fields are
+ returned if 'None' is passed.
+
+ Returns:
+ List[Dict[str, Any]]: Asset documents of input links for passed
+ asset doc.
+ """
+
+ if not asset_doc:
+ if not asset_id:
+ return []
+ asset_doc = get_asset_by_id(
+ project_name,
+ asset_id,
+ fields=["data.inputLinks"]
+ )
+ if not asset_doc:
+ return []
+
+ link_ids = get_linked_asset_ids(project_name, asset_doc=asset_doc)
+ if not link_ids:
+ return []
+
+ return list(get_assets(project_name, asset_ids=link_ids, fields=fields))
+
+
+def get_linked_representation_id(
+ project_name, repre_doc=None, repre_id=None, link_type=None, max_depth=None
+):
+ """Returns list of linked ids of particular type (if provided).
+
+ One of representation document or representation id must be passed.
+ Note:
+        Representation links currently work only from representation through
+        version back to representations.
+
+ Args:
+        project_name (str): Name of project where to look for links.
+ repre_doc (Dict[str, Any]): Representation document.
+ repre_id (Union[ObjectId, str]): Representation id.
+ link_type (str): Type of link (e.g. 'reference', ...).
+        max_depth (int): Limit recursion level. Default: 0 (do not limit
+            the recursion).
+
+    Returns:
+        List[ObjectId]: Linked representation ids.
+ """
+
+ if repre_doc:
+ repre_id = repre_doc["_id"]
+
+ if repre_id:
+ repre_id = convert_id(repre_id)
+
+ if not repre_id and not repre_doc:
+ return []
+
+ version_id = None
+ if repre_doc:
+ version_id = repre_doc.get("parent")
+
+ if not version_id:
+ repre_doc = get_representation_by_id(
+ project_name, repre_id, fields=["parent"]
+ )
+ version_id = repre_doc["parent"]
+
+ if not version_id:
+ return []
+
+ version_doc = get_version_by_id(
+ project_name, version_id, fields=["type", "version_id"]
+ )
+ if version_doc["type"] == "hero_version":
+ version_id = version_doc["version_id"]
+
+ if max_depth is None:
+ max_depth = 0
+
+ match = {
+ "_id": version_id,
+ # Links are not stored to hero versions at this moment so filter
+ # is limited to just versions
+ "type": "version"
+ }
+
+ graph_lookup = {
+ "from": project_name,
+ "startWith": "$data.inputLinks.id",
+ "connectFromField": "data.inputLinks.id",
+ "connectToField": "_id",
+ "as": "outputs_recursive",
+ "depthField": "depth"
+ }
+ if max_depth != 0:
+ # We offset by -1 since 0 basically means no recursion
+ # but the recursion only happens after the initial lookup
+ # for outputs.
+ graph_lookup["maxDepth"] = max_depth - 1
+
+ query_pipeline = [
+ # Match
+ {"$match": match},
+ # Recursive graph lookup for inputs
+ {"$graphLookup": graph_lookup}
+ ]
+
+ conn = get_project_connection(project_name)
+ result = conn.aggregate(query_pipeline)
+ referenced_version_ids = _process_referenced_pipeline_result(
+ result, link_type
+ )
+ if not referenced_version_ids:
+ return []
+
+ ref_ids = conn.distinct(
+ "_id",
+ filter={
+ "parent": {"$in": list(referenced_version_ids)},
+ "type": "representation"
+ }
+ )
+
+ return list(ref_ids)
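+
+# Example (illustrative, assumes 'repre_id' is a valid representation id):
+# representation ids linked through 'reference' links, limiting recursion
+# to two levels:
+#     linked_repre_ids = get_linked_representation_id(
+#         "my_project",
+#         repre_id=repre_id,
+#         link_type="reference",
+#         max_depth=2
+#     )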
+
+
+def _process_referenced_pipeline_result(result, link_type):
+ """Filters result from pipeline for particular link_type.
+
+ Pipeline cannot use link_type directly in a query.
+
+    Returns:
+        set: Version ids that are linked with the requested link type.
+ """
+
+ referenced_version_ids = set()
+ correctly_linked_ids = set()
+ for item in result:
+ input_links = item.get("data", {}).get("inputLinks")
+ if not input_links:
+ continue
+
+ _filter_input_links(
+ input_links,
+ link_type,
+ correctly_linked_ids
+ )
+
+ # outputs_recursive in random order, sort by depth
+ outputs_recursive = item.get("outputs_recursive")
+ if not outputs_recursive:
+ continue
+
+ for output in sorted(outputs_recursive, key=lambda o: o["depth"]):
+ # Leaf
+ if output["_id"] not in correctly_linked_ids:
+ continue
+
+ _filter_input_links(
+ output.get("data", {}).get("inputLinks"),
+ link_type,
+ correctly_linked_ids
+ )
+
+ referenced_version_ids.add(output["_id"])
+
+ return referenced_version_ids
+
+
+def _filter_input_links(input_links, link_type, correctly_linked_ids):
+ if not input_links: # to handle hero versions
+ return
+
+ for input_link in input_links:
+ if link_type and input_link["type"] != link_type:
+ continue
+
+ link_id = input_link.get("id") or input_link.get("_id")
+ if link_id is not None:
+ correctly_linked_ids.add(link_id)
diff --git a/openpype/client/mongo.py b/openpype/client/mongo/mongo.py
similarity index 98%
rename from openpype/client/mongo.py
rename to openpype/client/mongo/mongo.py
index 251041c028..2be426efeb 100644
--- a/openpype/client/mongo.py
+++ b/openpype/client/mongo/mongo.py
@@ -11,6 +11,7 @@ from bson.json_util import (
CANONICAL_JSON_OPTIONS
)
+from openpype import AYON_SERVER_ENABLED
if sys.version_info[0] == 2:
from urlparse import urlparse, parse_qs
else:
@@ -134,7 +135,7 @@ def should_add_certificate_path_to_mongo_url(mongo_url):
add_certificate = False
# Check if url 'ssl' or 'tls' are set to 'true'
for key in ("ssl", "tls"):
- if key in query and "true" in query["ssl"]:
+ if key in query and "true" in query[key]:
add_certificate = True
break
@@ -206,6 +207,8 @@ class OpenPypeMongoConnection:
@classmethod
def create_connection(cls, mongo_url, timeout=None, retry_attempts=None):
+ if AYON_SERVER_ENABLED:
+ raise RuntimeError("Created mongo connection in AYON mode")
parsed = urlparse(mongo_url)
# Force validation of scheme
if parsed.scheme not in ["mongodb", "mongodb+srv"]:
@@ -221,7 +224,7 @@ class OpenPypeMongoConnection:
"serverSelectionTimeoutMS": timeout
}
if should_add_certificate_path_to_mongo_url(mongo_url):
- kwargs["ssl_ca_certs"] = certifi.where()
+ kwargs["tlsCAFile"] = certifi.where()
mongo_client = pymongo.MongoClient(mongo_url, **kwargs)
diff --git a/openpype/client/mongo/operations.py b/openpype/client/mongo/operations.py
new file mode 100644
index 0000000000..3537aa4a3d
--- /dev/null
+++ b/openpype/client/mongo/operations.py
@@ -0,0 +1,632 @@
+import re
+import copy
+import collections
+
+from bson.objectid import ObjectId
+from pymongo import DeleteOne, InsertOne, UpdateOne
+
+from openpype.client.operations_base import (
+ REMOVED_VALUE,
+ CreateOperation,
+ UpdateOperation,
+ DeleteOperation,
+ BaseOperationsSession
+)
+from .mongo import get_project_connection
+from .entities import get_project
+
+
+PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_"
+PROJECT_NAME_REGEX = re.compile(
+ "^[{}]+$".format(PROJECT_NAME_ALLOWED_SYMBOLS)
+)
+
+CURRENT_PROJECT_SCHEMA = "openpype:project-3.0"
+CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0"
+CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0"
+CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0"
+CURRENT_VERSION_SCHEMA = "openpype:version-3.0"
+CURRENT_HERO_VERSION_SCHEMA = "openpype:hero_version-1.0"
+CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0"
+CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0"
+CURRENT_THUMBNAIL_SCHEMA = "openpype:thumbnail-1.0"
+
+
+def _create_or_convert_to_mongo_id(mongo_id):
+ if mongo_id is None:
+ return ObjectId()
+ return ObjectId(mongo_id)
+
+
+def new_project_document(
+ project_name, project_code, config, data=None, entity_id=None
+):
+ """Create skeleton data of project document.
+
+ Args:
+ project_name (str): Name of project. Used as identifier of a project.
+        project_code (str): Shorter version of project name without spaces and
+            special characters (in most cases). Should also be considered
+            a unique name across projects.
+        config (Dict[str, Any]): Project config consists of roots, templates,
+            applications and other project Anatomy related data.
+        data (Dict[str, Any]): Project data with information about its
+ attributes (e.g. 'fps' etc.) or integration specific keys.
+ entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+ created if not passed.
+
+ Returns:
+ Dict[str, Any]: Skeleton of project document.
+ """
+
+ if data is None:
+ data = {}
+
+ data["code"] = project_code
+
+ return {
+ "_id": _create_or_convert_to_mongo_id(entity_id),
+ "name": project_name,
+ "type": CURRENT_PROJECT_SCHEMA,
+ "entity_data": data,
+ "config": config
+ }
+
+
+def new_asset_document(
+ name, project_id, parent_id, parents, data=None, entity_id=None
+):
+ """Create skeleton data of asset document.
+
+ Args:
+ name (str): Is considered as unique identifier of asset in project.
+        project_id (Union[str, ObjectId]): Id of project document.
+ parent_id (Union[str, ObjectId]): Id of parent asset.
+ parents (List[str]): List of parent assets names.
+ data (Dict[str, Any]): Asset document data. Empty dictionary is used
+ if not passed. Value of 'parent_id' is used to fill 'visualParent'.
+ entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+ created if not passed.
+
+ Returns:
+ Dict[str, Any]: Skeleton of asset document.
+ """
+
+ if data is None:
+ data = {}
+ if parent_id is not None:
+ parent_id = ObjectId(parent_id)
+ data["visualParent"] = parent_id
+ data["parents"] = parents
+
+ return {
+ "_id": _create_or_convert_to_mongo_id(entity_id),
+ "type": "asset",
+ "name": name,
+ "parent": ObjectId(project_id),
+ "data": data,
+ "schema": CURRENT_ASSET_DOC_SCHEMA
+ }
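+
+# Example (illustrative, assumes 'project_id' and 'shots_id' are existing
+# document ids): skeleton for an asset nested under a 'shots' group:
+#     asset_doc = new_asset_document(
+#         "sh010", project_id, shots_id, ["shots"], data={"fps": 25}
+#     )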
+
+
+def new_subset_document(name, family, asset_id, data=None, entity_id=None):
+ """Create skeleton data of subset document.
+
+ Args:
+ name (str): Is considered as unique identifier of subset under asset.
+ family (str): Subset's family.
+ asset_id (Union[str, ObjectId]): Id of parent asset.
+ data (Dict[str, Any]): Subset document data. Empty dictionary is used
+ if not passed. Value of 'family' is used to fill 'family'.
+ entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+ created if not passed.
+
+ Returns:
+ Dict[str, Any]: Skeleton of subset document.
+ """
+
+ if data is None:
+ data = {}
+ data["family"] = family
+ return {
+ "_id": _create_or_convert_to_mongo_id(entity_id),
+ "schema": CURRENT_SUBSET_SCHEMA,
+ "type": "subset",
+ "name": name,
+ "data": data,
+ "parent": asset_id
+ }
+
+
+def new_version_doc(version, subset_id, data=None, entity_id=None):
+ """Create skeleton data of version document.
+
+ Args:
+ version (int): Is considered as unique identifier of version
+ under subset.
+ subset_id (Union[str, ObjectId]): Id of parent subset.
+ data (Dict[str, Any]): Version document data.
+ entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+ created if not passed.
+
+ Returns:
+ Dict[str, Any]: Skeleton of version document.
+ """
+
+ if data is None:
+ data = {}
+
+ return {
+ "_id": _create_or_convert_to_mongo_id(entity_id),
+ "schema": CURRENT_VERSION_SCHEMA,
+ "type": "version",
+ "name": int(version),
+ "parent": subset_id,
+ "data": data
+ }
+
+
+def new_hero_version_doc(version_id, subset_id, data=None, entity_id=None):
+ """Create skeleton data of hero version document.
+
+ Args:
+        version_id (ObjectId): Id of the source version the hero version
+            points to.
+ subset_id (Union[str, ObjectId]): Id of parent subset.
+ data (Dict[str, Any]): Version document data.
+ entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+ created if not passed.
+
+ Returns:
+        Dict[str, Any]: Skeleton of hero version document.
+ """
+
+ if data is None:
+ data = {}
+
+ return {
+ "_id": _create_or_convert_to_mongo_id(entity_id),
+ "schema": CURRENT_HERO_VERSION_SCHEMA,
+ "type": "hero_version",
+ "version_id": version_id,
+ "parent": subset_id,
+ "data": data
+ }
+
+
+def new_representation_doc(
+ name, version_id, context, data=None, entity_id=None
+):
+ """Create skeleton data of asset document.
+
+ Args:
+ version (int): Is considered as unique identifier of version
+ under subset.
+ version_id (Union[str, ObjectId]): Id of parent version.
+ context (Dict[str, Any]): Representation context used for fill template
+ of to query.
+ data (Dict[str, Any]): Representation document data.
+ entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+ created if not passed.
+
+ Returns:
+        Dict[str, Any]: Skeleton of representation document.
+ """
+
+ if data is None:
+ data = {}
+
+ return {
+ "_id": _create_or_convert_to_mongo_id(entity_id),
+ "schema": CURRENT_REPRESENTATION_SCHEMA,
+ "type": "representation",
+ "parent": version_id,
+ "name": name,
+ "data": data,
+
+ # Imprint shortcut to context for performance reasons.
+ "context": context
+ }
+
+
+def new_thumbnail_doc(data=None, entity_id=None):
+ """Create skeleton data of thumbnail document.
+
+ Args:
+ data (Dict[str, Any]): Thumbnail document data.
+ entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+ created if not passed.
+
+ Returns:
+ Dict[str, Any]: Skeleton of thumbnail document.
+ """
+
+ if data is None:
+ data = {}
+
+ return {
+ "_id": _create_or_convert_to_mongo_id(entity_id),
+ "type": "thumbnail",
+ "schema": CURRENT_THUMBNAIL_SCHEMA,
+ "data": data
+ }
+
+
+def new_workfile_info_doc(
+ filename, asset_id, task_name, files, data=None, entity_id=None
+):
+ """Create skeleton data of workfile info document.
+
+ Workfile document is at this moment used primarily for artist notes.
+
+ Args:
+ filename (str): Filename of workfile.
+        asset_id (Union[str, ObjectId]): Id of asset under which workfile
+            lives.
+ task_name (str): Task under which was workfile created.
+ files (List[str]): List of rootless filepaths related to workfile.
+ data (Dict[str, Any]): Additional metadata.
+
+ Returns:
+ Dict[str, Any]: Skeleton of workfile info document.
+ """
+
+ if not data:
+ data = {}
+
+ return {
+ "_id": _create_or_convert_to_mongo_id(entity_id),
+ "type": "workfile",
+ "parent": ObjectId(asset_id),
+ "task_name": task_name,
+ "filename": filename,
+ "data": data,
+ "files": files
+ }
+
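A quick sketch of how the skeleton helpers above chain together; the ids and names are hypothetical and only the helper signatures come from this module:

```python
from bson.objectid import ObjectId

asset_id = ObjectId()  # hypothetical parent asset id
subset_doc = new_subset_document("modelMain", "model", asset_id)
version_doc = new_version_doc(1, subset_doc["_id"])

# The skeletons are plain dicts linked through their "_id"/"parent" keys.
assert version_doc["parent"] == subset_doc["_id"]
```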
+
+def _prepare_update_data(old_doc, new_doc, replace):
+ changes = {}
+ for key, value in new_doc.items():
+ if key not in old_doc or value != old_doc[key]:
+ changes[key] = value
+
+ if replace:
+ for key in old_doc.keys():
+ if key not in new_doc:
+ changes[key] = REMOVED_VALUE
+ return changes
+
+
+def prepare_subset_update_data(old_doc, new_doc, replace=True):
+ """Compare two subset documents and prepare update data.
+
+ Based on the compared values this will create update data for
+ 'MongoUpdateOperation'.
+
+ Empty output means that documents are identical.
+
+ Returns:
+ Dict[str, Any]: Changes between old and new document.
+ """
+
+ return _prepare_update_data(old_doc, new_doc, replace)
+
+
+def prepare_version_update_data(old_doc, new_doc, replace=True):
+ """Compare two version documents and prepare update data.
+
+ Based on the compared values this will create update data for
+ 'MongoUpdateOperation'.
+
+ Empty output means that documents are identical.
+
+ Returns:
+ Dict[str, Any]: Changes between old and new document.
+ """
+
+ return _prepare_update_data(old_doc, new_doc, replace)
+
+
+def prepare_hero_version_update_data(old_doc, new_doc, replace=True):
+ """Compare two hero version documents and prepare update data.
+
+ Based on the compared values this will create update data for
+ 'MongoUpdateOperation'.
+
+ Empty output means that documents are identical.
+
+ Returns:
+ Dict[str, Any]: Changes between old and new document.
+ """
+
+ return _prepare_update_data(old_doc, new_doc, replace)
+
+
+def prepare_representation_update_data(old_doc, new_doc, replace=True):
+ """Compare two representation documents and prepare update data.
+
+ Based on the compared values this will create update data for
+ 'MongoUpdateOperation'.
+
+ Empty output means that documents are identical.
+
+ Returns:
+ Dict[str, Any]: Changes between old and new document.
+ """
+
+ return _prepare_update_data(old_doc, new_doc, replace)
+
+
+def prepare_workfile_info_update_data(old_doc, new_doc, replace=True):
+ """Compare two workfile info documents and prepare update data.
+
+ Based on the compared values this will create update data for
+ 'MongoUpdateOperation'.
+
+ Empty output means that documents are identical.
+
+ Returns:
+ Dict[str, Any]: Changes between old and new document.
+ """
+
+ return _prepare_update_data(old_doc, new_doc, replace)
+
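A minimal sketch (hypothetical documents) of what the prepare_* helpers return:

```python
old_doc = {"name": "modelMain", "data": {"family": "model"}, "obsolete": 1}
new_doc = {"name": "modelMain", "data": {"family": "rig"}}

changes = prepare_subset_update_data(old_doc, new_doc, replace=True)
# "name" is unchanged so it is skipped; "data" differs so it is included;
# "obsolete" is missing from the new document, so with replace=True it is
# marked for removal with the REMOVED_VALUE sentinel:
#   {"data": {"family": "rig"}, "obsolete": REMOVED_VALUE}
```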
+
+class MongoCreateOperation(CreateOperation):
+ """Operation to create an entity.
+
+ Args:
+ project_name (str): On which project operation will happen.
+ entity_type (str): Type of entity on which change happens.
+ e.g. 'asset', 'representation' etc.
+ data (Dict[str, Any]): Data of entity that will be created.
+ """
+
+ operation_name = "create"
+
+ def __init__(self, project_name, entity_type, data):
+ super(MongoCreateOperation, self).__init__(
+ project_name, entity_type, data
+ )
+
+ if "_id" not in self._data:
+ self._data["_id"] = ObjectId()
+ else:
+ self._data["_id"] = ObjectId(self._data["_id"])
+
+ @property
+ def entity_id(self):
+ return self._data["_id"]
+
+ def to_mongo_operation(self):
+ return InsertOne(copy.deepcopy(self._data))
+
+
+class MongoUpdateOperation(UpdateOperation):
+ """Operation to update an entity.
+
+ Args:
+ project_name (str): On which project operation will happen.
+ entity_type (str): Type of entity on which change happens.
+ e.g. 'asset', 'representation' etc.
+ entity_id (Union[str, ObjectId]): Identifier of an entity.
+ update_data (Dict[str, Any]): Key -> value changes that will be set in
+ database. If value is set to 'REMOVED_VALUE' the key will be
+ removed. Only first level of dictionary is checked (on purpose).
+ """
+
+ operation_name = "update"
+
+ def __init__(self, project_name, entity_type, entity_id, update_data):
+ super(MongoUpdateOperation, self).__init__(
+ project_name, entity_type, entity_id, update_data
+ )
+
+ self._entity_id = ObjectId(self._entity_id)
+
+ def to_mongo_operation(self):
+ unset_data = {}
+ set_data = {}
+ for key, value in self._update_data.items():
+ if value is REMOVED_VALUE:
+ unset_data[key] = None
+ else:
+ set_data[key] = value
+
+ op_data = {}
+ if unset_data:
+ op_data["$unset"] = unset_data
+ if set_data:
+ op_data["$set"] = set_data
+
+ if not op_data:
+ return None
+
+ return UpdateOne(
+ {"_id": self.entity_id},
+ op_data
+ )
+
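To illustrate how update data is translated into a single Mongo call, a sketch with a hypothetical id:

```python
from bson.objectid import ObjectId

op = MongoUpdateOperation(
    "my_project", "subset", ObjectId(),
    {"data.family": "rig", "obsolete": REMOVED_VALUE}
)
# REMOVED_VALUE entries become "$unset", everything else becomes "$set":
# UpdateOne({"_id": <id>}, {"$set": {"data.family": "rig"},
#                           "$unset": {"obsolete": None}})
mongo_op = op.to_mongo_operation()
```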
+
+class MongoDeleteOperation(DeleteOperation):
+ """Operation to delete an entity.
+
+ Args:
+ project_name (str): On which project operation will happen.
+ entity_type (str): Type of entity on which change happens.
+ e.g. 'asset', 'representation' etc.
+ entity_id (Union[str, ObjectId]): Entity id that will be removed.
+ """
+
+ operation_name = "delete"
+
+ def __init__(self, project_name, entity_type, entity_id):
+ super(MongoDeleteOperation, self).__init__(
+ project_name, entity_type, entity_id
+ )
+
+ self._entity_id = ObjectId(self._entity_id)
+
+ def to_mongo_operation(self):
+ return DeleteOne({"_id": self.entity_id})
+
+
+class MongoOperationsSession(BaseOperationsSession):
+ """Session storing operations that should happen in an order.
+
+ At this moment it does not handle anything special and can be considered
+ a simple list of operations that will happen one after another. If the
+ creation of the same entity is registered multiple times it is not
+ handled in any way and document values are not validated.
+
+ Operations may target multiple projects; on commit they are grouped by
+ project and each group is sent as a single bulk write.
+ """
+
+ def commit(self):
+ """Commit session operations."""
+
+ operations, self._operations = self._operations, []
+ if not operations:
+ return
+
+ operations_by_project = collections.defaultdict(list)
+ for operation in operations:
+ operations_by_project[operation.project_name].append(operation)
+
+ for project_name, operations in operations_by_project.items():
+ bulk_writes = []
+ for operation in operations:
+ mongo_op = operation.to_mongo_operation()
+ if mongo_op is not None:
+ bulk_writes.append(mongo_op)
+
+ if bulk_writes:
+ collection = get_project_connection(project_name)
+ collection.bulk_write(bulk_writes)
+
+ def create_entity(self, project_name, entity_type, data):
+ """Fast access to 'MongoCreateOperation'.
+
+ Returns:
+ MongoCreateOperation: Object of create operation.
+ """
+
+ operation = MongoCreateOperation(project_name, entity_type, data)
+ self.add(operation)
+ return operation
+
+ def update_entity(self, project_name, entity_type, entity_id, update_data):
+ """Fast access to 'MongoUpdateOperation'.
+
+ Returns:
+ MongoUpdateOperation: Object of update operation.
+ """
+
+ operation = MongoUpdateOperation(
+ project_name, entity_type, entity_id, update_data
+ )
+ self.add(operation)
+ return operation
+
+ def delete_entity(self, project_name, entity_type, entity_id):
+ """Fast access to 'MongoDeleteOperation'.
+
+ Returns:
+ MongoDeleteOperation: Object of delete operation.
+ """
+
+ operation = MongoDeleteOperation(project_name, entity_type, entity_id)
+ self.add(operation)
+ return operation
+
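A usage sketch of the session; the project name and data are hypothetical, and 'commit' needs a reachable Mongo connection:

```python
session = MongoOperationsSession()
create_op = session.create_entity(
    "my_project", "asset", {"type": "asset", "name": "sh010"}
)
session.update_entity(
    "my_project", "asset", create_op.entity_id, {"data.fps": 25}
)
# Both operations are grouped by project and sent as one bulk write.
session.commit()
```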
+
+def create_project(
+ project_name,
+ project_code,
+ library_project=False,
+):
+ """Create project using OpenPype settings.
+
+ This project creation function does not validate the project document on
+ creation. The project document is created blindly with only the minimum
+ required information about the project, which is its name, code, type
+ and schema.
+
+ Entered project name must be unique and project must not exist yet.
+
+ Note:
+ This function is here to be OP v4 ready, but in v3 it has more
+ logic to do. That's why the inner imports are in the body.
+
+ Args:
+ project_name(str): New project name. Should be unique.
+ project_code(str): Project's code should be unique too.
+ library_project(bool): Project is library project.
+
+ Raises:
+ ValueError: When project name already exists in MongoDB.
+
+ Returns:
+ dict: Created project document.
+ """
+
+ from openpype.settings import ProjectSettings, SaveWarningExc
+ from openpype.pipeline.schema import validate
+
+ if get_project(project_name, fields=["name"]):
+ raise ValueError("Project with name \"{}\" already exists".format(
+ project_name
+ ))
+
+ if not PROJECT_NAME_REGEX.match(project_name):
+ raise ValueError((
+ "Project name \"{}\" contain invalid characters"
+ ).format(project_name))
+
+ project_doc = {
+ "type": "project",
+ "name": project_name,
+ "data": {
+ "code": project_code,
+ "library_project": library_project
+ },
+ "schema": CURRENT_PROJECT_SCHEMA
+ }
+
+ op_session = MongoOperationsSession()
+ # Insert document with basic data
+ create_op = op_session.create_entity(
+ project_name, project_doc["type"], project_doc
+ )
+ op_session.commit()
+
+ # Load ProjectSettings for the project and save it to store all attributes
+ # and Anatomy
+ try:
+ project_settings_entity = ProjectSettings(project_name)
+ project_settings_entity.save()
+ except SaveWarningExc as exc:
+ print(str(exc))
+ except Exception:
+ op_session.delete_entity(
+ project_name, project_doc["type"], create_op.entity_id
+ )
+ op_session.commit()
+ raise
+
+ project_doc = get_project(project_name)
+
+ try:
+ # Validate created project document
+ validate(project_doc)
+ except Exception:
+ # Remove project if is not valid
+ op_session.delete_entity(
+ project_name, project_doc["type"], create_op.entity_id
+ )
+ op_session.commit()
+ raise
+
+ return project_doc
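A hedged usage sketch; the project name and code are hypothetical, and the call requires configured OpenPype settings and a Mongo connection:

```python
project_doc = create_project("my_project", "myproj")
print(project_doc["data"]["code"])  # "myproj"
```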
diff --git a/openpype/client/operations.py b/openpype/client/operations.py
index e8c9d28636..8bc09dffd3 100644
--- a/openpype/client/operations.py
+++ b/openpype/client/operations.py
@@ -1,797 +1,24 @@
-import re
-import uuid
-import copy
-import collections
-from abc import ABCMeta, abstractmethod, abstractproperty
+from openpype import AYON_SERVER_ENABLED
-import six
-from bson.objectid import ObjectId
-from pymongo import DeleteOne, InsertOne, UpdateOne
+from .operations_base import REMOVED_VALUE
+if not AYON_SERVER_ENABLED:
+ from .mongo.operations import *
+ OperationsSession = MongoOperationsSession
-from .mongo import get_project_connection
-from .entities import get_project
-
-REMOVED_VALUE = object()
-
-PROJECT_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_"
-PROJECT_NAME_REGEX = re.compile(
- "^[{}]+$".format(PROJECT_NAME_ALLOWED_SYMBOLS)
-)
-
-CURRENT_PROJECT_SCHEMA = "openpype:project-3.0"
-CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0"
-CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0"
-CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0"
-CURRENT_VERSION_SCHEMA = "openpype:version-3.0"
-CURRENT_HERO_VERSION_SCHEMA = "openpype:hero_version-1.0"
-CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0"
-CURRENT_WORKFILE_INFO_SCHEMA = "openpype:workfile-1.0"
-CURRENT_THUMBNAIL_SCHEMA = "openpype:thumbnail-1.0"
-
-
-def _create_or_convert_to_mongo_id(mongo_id):
- if mongo_id is None:
- return ObjectId()
- return ObjectId(mongo_id)
-
-
-def new_project_document(
- project_name, project_code, config, data=None, entity_id=None
-):
- """Create skeleton data of project document.
-
- Args:
- project_name (str): Name of project. Used as identifier of a project.
- project_code (str): Shorter version of projet without spaces and
- special characters (in most of cases). Should be also considered
- as unique name across projects.
- config (Dic[str, Any]): Project config consist of roots, templates,
- applications and other project Anatomy related data.
- data (Dict[str, Any]): Project data with information about it's
- attributes (e.g. 'fps' etc.) or integration specific keys.
- entity_id (Union[str, ObjectId]): Predefined id of document. New id is
- created if not passed.
-
- Returns:
- Dict[str, Any]: Skeleton of project document.
- """
-
- if data is None:
- data = {}
-
- data["code"] = project_code
-
- return {
- "_id": _create_or_convert_to_mongo_id(entity_id),
- "name": project_name,
- "type": CURRENT_PROJECT_SCHEMA,
- "entity_data": data,
- "config": config
- }
-
-
-def new_asset_document(
- name, project_id, parent_id, parents, data=None, entity_id=None
-):
- """Create skeleton data of asset document.
-
- Args:
- name (str): Is considered as unique identifier of asset in project.
- project_id (Union[str, ObjectId]): Id of project doument.
- parent_id (Union[str, ObjectId]): Id of parent asset.
- parents (List[str]): List of parent assets names.
- data (Dict[str, Any]): Asset document data. Empty dictionary is used
- if not passed. Value of 'parent_id' is used to fill 'visualParent'.
- entity_id (Union[str, ObjectId]): Predefined id of document. New id is
- created if not passed.
-
- Returns:
- Dict[str, Any]: Skeleton of asset document.
- """
-
- if data is None:
- data = {}
- if parent_id is not None:
- parent_id = ObjectId(parent_id)
- data["visualParent"] = parent_id
- data["parents"] = parents
-
- return {
- "_id": _create_or_convert_to_mongo_id(entity_id),
- "type": "asset",
- "name": name,
- "parent": ObjectId(project_id),
- "data": data,
- "schema": CURRENT_ASSET_DOC_SCHEMA
- }
-
-
-def new_subset_document(name, family, asset_id, data=None, entity_id=None):
- """Create skeleton data of subset document.
-
- Args:
- name (str): Is considered as unique identifier of subset under asset.
- family (str): Subset's family.
- asset_id (Union[str, ObjectId]): Id of parent asset.
- data (Dict[str, Any]): Subset document data. Empty dictionary is used
- if not passed. Value of 'family' is used to fill 'family'.
- entity_id (Union[str, ObjectId]): Predefined id of document. New id is
- created if not passed.
-
- Returns:
- Dict[str, Any]: Skeleton of subset document.
- """
-
- if data is None:
- data = {}
- data["family"] = family
- return {
- "_id": _create_or_convert_to_mongo_id(entity_id),
- "schema": CURRENT_SUBSET_SCHEMA,
- "type": "subset",
- "name": name,
- "data": data,
- "parent": asset_id
- }
-
-
-def new_version_doc(version, subset_id, data=None, entity_id=None):
- """Create skeleton data of version document.
-
- Args:
- version (int): Is considered as unique identifier of version
- under subset.
- subset_id (Union[str, ObjectId]): Id of parent subset.
- data (Dict[str, Any]): Version document data.
- entity_id (Union[str, ObjectId]): Predefined id of document. New id is
- created if not passed.
-
- Returns:
- Dict[str, Any]: Skeleton of version document.
- """
-
- if data is None:
- data = {}
-
- return {
- "_id": _create_or_convert_to_mongo_id(entity_id),
- "schema": CURRENT_VERSION_SCHEMA,
- "type": "version",
- "name": int(version),
- "parent": subset_id,
- "data": data
- }
-
-
-def new_hero_version_doc(version_id, subset_id, data=None, entity_id=None):
- """Create skeleton data of hero version document.
-
- Args:
- version_id (ObjectId): Is considered as unique identifier of version
- under subset.
- subset_id (Union[str, ObjectId]): Id of parent subset.
- data (Dict[str, Any]): Version document data.
- entity_id (Union[str, ObjectId]): Predefined id of document. New id is
- created if not passed.
-
- Returns:
- Dict[str, Any]: Skeleton of version document.
- """
-
- if data is None:
- data = {}
-
- return {
- "_id": _create_or_convert_to_mongo_id(entity_id),
- "schema": CURRENT_HERO_VERSION_SCHEMA,
- "type": "hero_version",
- "version_id": version_id,
- "parent": subset_id,
- "data": data
- }
-
-
-def new_representation_doc(
- name, version_id, context, data=None, entity_id=None
-):
- """Create skeleton data of asset document.
-
- Args:
- version (int): Is considered as unique identifier of version
- under subset.
- version_id (Union[str, ObjectId]): Id of parent version.
- context (Dict[str, Any]): Representation context used for fill template
- of to query.
- data (Dict[str, Any]): Representation document data.
- entity_id (Union[str, ObjectId]): Predefined id of document. New id is
- created if not passed.
-
- Returns:
- Dict[str, Any]: Skeleton of version document.
- """
-
- if data is None:
- data = {}
-
- return {
- "_id": _create_or_convert_to_mongo_id(entity_id),
- "schema": CURRENT_REPRESENTATION_SCHEMA,
- "type": "representation",
- "parent": version_id,
- "name": name,
- "data": data,
- # Imprint shortcut to context for performance reasons.
- "context": context
- }
-
-
-def new_thumbnail_doc(data=None, entity_id=None):
- """Create skeleton data of thumbnail document.
-
- Args:
- data (Dict[str, Any]): Thumbnail document data.
- entity_id (Union[str, ObjectId]): Predefined id of document. New id is
- created if not passed.
-
- Returns:
- Dict[str, Any]: Skeleton of thumbnail document.
- """
-
- if data is None:
- data = {}
-
- return {
- "_id": _create_or_convert_to_mongo_id(entity_id),
- "type": "thumbnail",
- "schema": CURRENT_THUMBNAIL_SCHEMA,
- "data": data
- }
-
-
-def new_workfile_info_doc(
- filename, asset_id, task_name, files, data=None, entity_id=None
-):
- """Create skeleton data of workfile info document.
-
- Workfile document is at this moment used primarily for artist notes.
-
- Args:
- filename (str): Filename of workfile.
- asset_id (Union[str, ObjectId]): Id of asset under which workfile live.
- task_name (str): Task under which was workfile created.
- files (List[str]): List of rootless filepaths related to workfile.
- data (Dict[str, Any]): Additional metadata.
-
- Returns:
- Dict[str, Any]: Skeleton of workfile info document.
- """
-
- if not data:
- data = {}
-
- return {
- "_id": _create_or_convert_to_mongo_id(entity_id),
- "type": "workfile",
- "parent": ObjectId(asset_id),
- "task_name": task_name,
- "filename": filename,
- "data": data,
- "files": files
- }
-
-
-def _prepare_update_data(old_doc, new_doc, replace):
- changes = {}
- for key, value in new_doc.items():
- if key not in old_doc or value != old_doc[key]:
- changes[key] = value
-
- if replace:
- for key in old_doc.keys():
- if key not in new_doc:
- changes[key] = REMOVED_VALUE
- return changes
-
-
-def prepare_subset_update_data(old_doc, new_doc, replace=True):
- """Compare two subset documents and prepare update data.
-
- Based on compared values will create update data for 'UpdateOperation'.
-
- Empty output means that documents are identical.
-
- Returns:
- Dict[str, Any]: Changes between old and new document.
- """
-
- return _prepare_update_data(old_doc, new_doc, replace)
-
-
-def prepare_version_update_data(old_doc, new_doc, replace=True):
- """Compare two version documents and prepare update data.
-
- Based on compared values will create update data for 'UpdateOperation'.
-
- Empty output means that documents are identical.
-
- Returns:
- Dict[str, Any]: Changes between old and new document.
- """
-
- return _prepare_update_data(old_doc, new_doc, replace)
-
-
-def prepare_hero_version_update_data(old_doc, new_doc, replace=True):
- """Compare two hero version documents and prepare update data.
-
- Based on compared values will create update data for 'UpdateOperation'.
-
- Empty output means that documents are identical.
-
- Returns:
- Dict[str, Any]: Changes between old and new document.
- """
-
- return _prepare_update_data(old_doc, new_doc, replace)
-
-
-def prepare_representation_update_data(old_doc, new_doc, replace=True):
- """Compare two representation documents and prepare update data.
-
- Based on compared values will create update data for 'UpdateOperation'.
-
- Empty output means that documents are identical.
-
- Returns:
- Dict[str, Any]: Changes between old and new document.
- """
-
- return _prepare_update_data(old_doc, new_doc, replace)
-
-
-def prepare_workfile_info_update_data(old_doc, new_doc, replace=True):
- """Compare two workfile info documents and prepare update data.
-
- Based on compared values will create update data for 'UpdateOperation'.
-
- Empty output means that documents are identical.
-
- Returns:
- Dict[str, Any]: Changes between old and new document.
- """
-
- return _prepare_update_data(old_doc, new_doc, replace)
-
-
-@six.add_metaclass(ABCMeta)
-class AbstractOperation(object):
- """Base operation class.
-
- Operation represent a call into database. The call can create, change or
- remove data.
-
- Args:
- project_name (str): On which project operation will happen.
- entity_type (str): Type of entity on which change happens.
- e.g. 'asset', 'representation' etc.
- """
-
- def __init__(self, project_name, entity_type):
- self._project_name = project_name
- self._entity_type = entity_type
- self._id = str(uuid.uuid4())
-
- @property
- def project_name(self):
- return self._project_name
-
- @property
- def id(self):
- """Identifier of operation."""
-
- return self._id
-
- @property
- def entity_type(self):
- return self._entity_type
-
- @abstractproperty
- def operation_name(self):
- """Stringified type of operation."""
-
- pass
-
- @abstractmethod
- def to_mongo_operation(self):
- """Convert operation to Mongo batch operation."""
-
- pass
-
- def to_data(self):
- """Convert operation to data that can be converted to json or others.
-
- Warning:
- Current state returns ObjectId objects which cannot be parsed by
- json.
-
- Returns:
- Dict[str, Any]: Description of operation.
- """
-
- return {
- "id": self._id,
- "entity_type": self.entity_type,
- "project_name": self.project_name,
- "operation": self.operation_name
- }
-
-
-class CreateOperation(AbstractOperation):
- """Operation to create an entity.
-
- Args:
- project_name (str): On which project operation will happen.
- entity_type (str): Type of entity on which change happens.
- e.g. 'asset', 'representation' etc.
- data (Dict[str, Any]): Data of entity that will be created.
- """
-
- operation_name = "create"
-
- def __init__(self, project_name, entity_type, data):
- super(CreateOperation, self).__init__(project_name, entity_type)
-
- if not data:
- data = {}
- else:
- data = copy.deepcopy(dict(data))
-
- if "_id" not in data:
- data["_id"] = ObjectId()
- else:
- data["_id"] = ObjectId(data["_id"])
-
- self._entity_id = data["_id"]
- self._data = data
-
- def __setitem__(self, key, value):
- self.set_value(key, value)
-
- def __getitem__(self, key):
- return self.data[key]
-
- def set_value(self, key, value):
- self.data[key] = value
-
- def get(self, key, *args, **kwargs):
- return self.data.get(key, *args, **kwargs)
-
- @property
- def entity_id(self):
- return self._entity_id
-
- @property
- def data(self):
- return self._data
-
- def to_mongo_operation(self):
- return InsertOne(copy.deepcopy(self._data))
-
- def to_data(self):
- output = super(CreateOperation, self).to_data()
- output["data"] = copy.deepcopy(self.data)
- return output
-
-
-class UpdateOperation(AbstractOperation):
- """Operation to update an entity.
-
- Args:
- project_name (str): On which project operation will happen.
- entity_type (str): Type of entity on which change happens.
- e.g. 'asset', 'representation' etc.
- entity_id (Union[str, ObjectId]): Identifier of an entity.
- update_data (Dict[str, Any]): Key -> value changes that will be set in
- database. If value is set to 'REMOVED_VALUE' the key will be
- removed. Only first level of dictionary is checked (on purpose).
- """
-
- operation_name = "update"
-
- def __init__(self, project_name, entity_type, entity_id, update_data):
- super(UpdateOperation, self).__init__(project_name, entity_type)
-
- self._entity_id = ObjectId(entity_id)
- self._update_data = update_data
-
- @property
- def entity_id(self):
- return self._entity_id
-
- @property
- def update_data(self):
- return self._update_data
-
- def to_mongo_operation(self):
- unset_data = {}
- set_data = {}
- for key, value in self._update_data.items():
- if value is REMOVED_VALUE:
- unset_data[key] = None
- else:
- set_data[key] = value
-
- op_data = {}
- if unset_data:
- op_data["$unset"] = unset_data
- if set_data:
- op_data["$set"] = set_data
-
- if not op_data:
- return None
-
- return UpdateOne(
- {"_id": self.entity_id},
- op_data
- )
-
- def to_data(self):
- changes = {}
- for key, value in self._update_data.items():
- if value is REMOVED_VALUE:
- value = None
- changes[key] = value
-
- output = super(UpdateOperation, self).to_data()
- output.update({
- "entity_id": self.entity_id,
- "changes": changes
- })
- return output
-
-
-class DeleteOperation(AbstractOperation):
- """Operation to delete an entity.
-
- Args:
- project_name (str): On which project operation will happen.
- entity_type (str): Type of entity on which change happens.
- e.g. 'asset', 'representation' etc.
- entity_id (Union[str, ObjectId]): Entity id that will be removed.
- """
-
- operation_name = "delete"
-
- def __init__(self, project_name, entity_type, entity_id):
- super(DeleteOperation, self).__init__(project_name, entity_type)
-
- self._entity_id = ObjectId(entity_id)
-
- @property
- def entity_id(self):
- return self._entity_id
-
- def to_mongo_operation(self):
- return DeleteOne({"_id": self.entity_id})
-
- def to_data(self):
- output = super(DeleteOperation, self).to_data()
- output["entity_id"] = self.entity_id
- return output
-
-
-class OperationsSession(object):
- """Session storing operations that should happen in an order.
-
- At this moment does not handle anything special can be sonsidered as
- stupid list of operations that will happen after each other. If creation
- of same entity is there multiple times it's handled in any way and document
- values are not validated.
-
- All operations must be related to single project.
-
- Args:
- project_name (str): Project name to which are operations related.
- """
-
- def __init__(self):
- self._operations = []
-
- def add(self, operation):
- """Add operation to be processed.
-
- Args:
- operation (BaseOperation): Operation that should be processed.
- """
- if not isinstance(
- operation,
- (CreateOperation, UpdateOperation, DeleteOperation)
- ):
- raise TypeError("Expected Operation object got {}".format(
- str(type(operation))
- ))
-
- self._operations.append(operation)
-
- def append(self, operation):
- """Add operation to be processed.
-
- Args:
- operation (BaseOperation): Operation that should be processed.
- """
-
- self.add(operation)
-
- def extend(self, operations):
- """Add operations to be processed.
-
- Args:
- operations (List[BaseOperation]): Operations that should be
- processed.
- """
-
- for operation in operations:
- self.add(operation)
-
- def remove(self, operation):
- """Remove operation."""
-
- self._operations.remove(operation)
-
- def clear(self):
- """Clear all registered operations."""
-
- self._operations = []
-
- def to_data(self):
- return [
- operation.to_data()
- for operation in self._operations
- ]
-
- def commit(self):
- """Commit session operations."""
-
- operations, self._operations = self._operations, []
- if not operations:
- return
-
- operations_by_project = collections.defaultdict(list)
- for operation in operations:
- operations_by_project[operation.project_name].append(operation)
-
- for project_name, operations in operations_by_project.items():
- bulk_writes = []
- for operation in operations:
- mongo_op = operation.to_mongo_operation()
- if mongo_op is not None:
- bulk_writes.append(mongo_op)
-
- if bulk_writes:
- collection = get_project_connection(project_name)
- collection.bulk_write(bulk_writes)
-
- def create_entity(self, project_name, entity_type, data):
- """Fast access to 'CreateOperation'.
-
- Returns:
- CreateOperation: Object of update operation.
- """
-
- operation = CreateOperation(project_name, entity_type, data)
- self.add(operation)
- return operation
-
- def update_entity(self, project_name, entity_type, entity_id, update_data):
- """Fast access to 'UpdateOperation'.
-
- Returns:
- UpdateOperation: Object of update operation.
- """
-
- operation = UpdateOperation(
- project_name, entity_type, entity_id, update_data
- )
- self.add(operation)
- return operation
-
- def delete_entity(self, project_name, entity_type, entity_id):
- """Fast access to 'DeleteOperation'.
-
- Returns:
- DeleteOperation: Object of delete operation.
- """
-
- operation = DeleteOperation(project_name, entity_type, entity_id)
- self.add(operation)
- return operation
-
-
-def create_project(
- project_name,
- project_code,
- library_project=False,
-):
- """Create project using OpenPype settings.
-
- This project creation function is not validating project document on
- creation. It is because project document is created blindly with only
- minimum required information about project which is it's name, code, type
- and schema.
-
- Entered project name must be unique and project must not exist yet.
-
- Note:
- This function is here to be OP v4 ready but in v3 has more logic
- to do. That's why inner imports are in the body.
-
- Args:
- project_name(str): New project name. Should be unique.
- project_code(str): Project's code should be unique too.
- library_project(bool): Project is library project.
-
- Raises:
- ValueError: When project name already exists in MongoDB.
-
- Returns:
- dict: Created project document.
- """
-
- from openpype.settings import ProjectSettings, SaveWarningExc
- from openpype.pipeline.schema import validate
-
- if get_project(project_name, fields=["name"]):
- raise ValueError("Project with name \"{}\" already exists".format(
- project_name
- ))
-
- if not PROJECT_NAME_REGEX.match(project_name):
- raise ValueError((
- "Project name \"{}\" contain invalid characters"
- ).format(project_name))
-
- project_doc = {
- "type": "project",
- "name": project_name,
- "data": {
- "code": project_code,
- "library_project": library_project,
- },
- "schema": CURRENT_PROJECT_SCHEMA
- }
-
- op_session = OperationsSession()
- # Insert document with basic data
- create_op = op_session.create_entity(
- project_name, project_doc["type"], project_doc
+else:
+ from ayon_api.server_api import (
+ PROJECT_NAME_ALLOWED_SYMBOLS,
+ PROJECT_NAME_REGEX,
+ )
+ from .server.operations import *
+ from .mongo.operations import (
+ CURRENT_PROJECT_SCHEMA,
+ CURRENT_PROJECT_CONFIG_SCHEMA,
+ CURRENT_ASSET_DOC_SCHEMA,
+ CURRENT_SUBSET_SCHEMA,
+ CURRENT_VERSION_SCHEMA,
+ CURRENT_HERO_VERSION_SCHEMA,
+ CURRENT_REPRESENTATION_SCHEMA,
+ CURRENT_WORKFILE_INFO_SCHEMA,
+ CURRENT_THUMBNAIL_SCHEMA
)
- op_session.commit()
-
- # Load ProjectSettings for the project and save it to store all attributes
- # and Anatomy
- try:
- project_settings_entity = ProjectSettings(project_name)
- project_settings_entity.save()
- except SaveWarningExc as exc:
- print(str(exc))
- except Exception:
- op_session.delete_entity(
- project_name, project_doc["type"], create_op.entity_id
- )
- op_session.commit()
- raise
-
- project_doc = get_project(project_name)
-
- try:
- # Validate created project document
- validate(project_doc)
- except Exception:
- # Remove project if is not valid
- op_session.delete_entity(
- project_name, project_doc["type"], create_op.entity_id
- )
- op_session.commit()
- raise
-
- return project_doc
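The net effect of this change is that consumers keep a single import point: 'OperationsSession' resolves to the Mongo implementation when AYON server mode is disabled and to the server implementation otherwise. A sketch with a hypothetical project and entity id:

```python
from openpype.client.operations import OperationsSession

session = OperationsSession()
session.update_entity(
    "my_project", "asset", "633a6f9c8f1d4c2a9e7b0c1d", {"data.fps": 25}
)
session.commit()
```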
diff --git a/openpype/client/operations_base.py b/openpype/client/operations_base.py
new file mode 100644
index 0000000000..887b237b1c
--- /dev/null
+++ b/openpype/client/operations_base.py
@@ -0,0 +1,289 @@
+import uuid
+import copy
+from abc import ABCMeta, abstractmethod, abstractproperty
+import six
+
+REMOVED_VALUE = object()
+
+
+@six.add_metaclass(ABCMeta)
+class AbstractOperation(object):
+ """Base operation class.
+
+ An operation represents a call into the database. The call can create,
+ change or remove data.
+
+ Args:
+ project_name (str): On which project operation will happen.
+ entity_type (str): Type of entity on which change happens.
+ e.g. 'asset', 'representation' etc.
+ """
+
+ def __init__(self, project_name, entity_type):
+ self._project_name = project_name
+ self._entity_type = entity_type
+ self._id = str(uuid.uuid4())
+
+ @property
+ def project_name(self):
+ return self._project_name
+
+ @property
+ def id(self):
+ """Identifier of operation."""
+
+ return self._id
+
+ @property
+ def entity_type(self):
+ return self._entity_type
+
+ @abstractproperty
+ def operation_name(self):
+ """Stringified type of operation."""
+
+ pass
+
+ def to_data(self):
+ """Convert operation to data that can be converted to json or others.
+
+ Warning:
+ Current state returns ObjectId objects which cannot be parsed by
+ json.
+
+ Returns:
+ Dict[str, Any]: Description of operation.
+ """
+
+ return {
+ "id": self._id,
+ "entity_type": self.entity_type,
+ "project_name": self.project_name,
+ "operation": self.operation_name
+ }
+
+
+class CreateOperation(AbstractOperation):
+ """Operation to create an entity.
+
+ Args:
+ project_name (str): On which project operation will happen.
+ entity_type (str): Type of entity on which change happens.
+ e.g. 'asset', 'representation' etc.
+ data (Dict[str, Any]): Data of entity that will be created.
+ """
+
+ operation_name = "create"
+
+ def __init__(self, project_name, entity_type, data):
+ super(CreateOperation, self).__init__(project_name, entity_type)
+
+ if not data:
+ data = {}
+ else:
+ data = copy.deepcopy(dict(data))
+ self._data = data
+
+ def __setitem__(self, key, value):
+ self.set_value(key, value)
+
+ def __getitem__(self, key):
+ return self.data[key]
+
+ def set_value(self, key, value):
+ self.data[key] = value
+
+ def get(self, key, *args, **kwargs):
+ return self.data.get(key, *args, **kwargs)
+
+ @abstractproperty
+ def entity_id(self):
+ pass
+
+ @property
+ def data(self):
+ return self._data
+
+ def to_data(self):
+ output = super(CreateOperation, self).to_data()
+ output["data"] = copy.deepcopy(self.data)
+ return output
+
+
+class UpdateOperation(AbstractOperation):
+ """Operation to update an entity.
+
+ Args:
+ project_name (str): On which project operation will happen.
+ entity_type (str): Type of entity on which change happens.
+ e.g. 'asset', 'representation' etc.
+ entity_id (Union[str, ObjectId]): Identifier of an entity.
+ update_data (Dict[str, Any]): Key -> value changes that will be set in
+ database. If value is set to 'REMOVED_VALUE' the key will be
+ removed. Only first level of dictionary is checked (on purpose).
+ """
+
+ operation_name = "update"
+
+ def __init__(self, project_name, entity_type, entity_id, update_data):
+ super(UpdateOperation, self).__init__(project_name, entity_type)
+
+ self._entity_id = entity_id
+ self._update_data = update_data
+
+ @property
+ def entity_id(self):
+ return self._entity_id
+
+ @property
+ def update_data(self):
+ return self._update_data
+
+ def to_data(self):
+ changes = {}
+ for key, value in self._update_data.items():
+ if value is REMOVED_VALUE:
+ value = None
+ changes[key] = value
+
+ output = super(UpdateOperation, self).to_data()
+ output.update({
+ "entity_id": self.entity_id,
+ "changes": changes
+ })
+ return output
+
+
+class DeleteOperation(AbstractOperation):
+ """Operation to delete an entity.
+
+ Args:
+ project_name (str): On which project operation will happen.
+ entity_type (str): Type of entity on which change happens.
+ e.g. 'asset', 'representation' etc.
+ entity_id (Union[str, ObjectId]): Entity id that will be removed.
+ """
+
+ operation_name = "delete"
+
+ def __init__(self, project_name, entity_type, entity_id):
+ super(DeleteOperation, self).__init__(project_name, entity_type)
+
+ self._entity_id = entity_id
+
+ @property
+ def entity_id(self):
+ return self._entity_id
+
+ def to_data(self):
+ output = super(DeleteOperation, self).to_data()
+ output["entity_id"] = self.entity_id
+ return output
+
+
+class BaseOperationsSession(object):
+ """Session storing operations that should happen in an order.
+
+ At this moment it does not handle anything special and can be considered
+ a simple list of operations that will happen one after another. If the
+ creation of the same entity is registered multiple times it is not
+ handled in any way and document values are not validated.
+ """
+
+ def __init__(self):
+ self._operations = []
+
+ def __len__(self):
+ return len(self._operations)
+
+ def add(self, operation):
+ """Add operation to be processed.
+
+ Args:
+ operation (BaseOperation): Operation that should be processed.
+ """
+ if not isinstance(
+ operation,
+ (CreateOperation, UpdateOperation, DeleteOperation)
+ ):
+ raise TypeError("Expected Operation object got {}".format(
+ str(type(operation))
+ ))
+
+ self._operations.append(operation)
+
+ def append(self, operation):
+ """Add operation to be processed.
+
+ Args:
+ operation (BaseOperation): Operation that should be processed.
+ """
+
+ self.add(operation)
+
+ def extend(self, operations):
+ """Add operations to be processed.
+
+ Args:
+ operations (List[BaseOperation]): Operations that should be
+ processed.
+ """
+
+ for operation in operations:
+ self.add(operation)
+
+ def remove(self, operation):
+ """Remove operation."""
+
+ self._operations.remove(operation)
+
+ def clear(self):
+ """Clear all registered operations."""
+
+ self._operations = []
+
+ def to_data(self):
+ return [
+ operation.to_data()
+ for operation in self._operations
+ ]
+
+ @abstractmethod
+ def commit(self):
+ """Commit session operations."""
+ pass
+
+ def create_entity(self, project_name, entity_type, data):
+ """Fast access to 'CreateOperation'.
+
+ Returns:
+ CreateOperation: Object of create operation.
+ """
+
+ operation = CreateOperation(project_name, entity_type, data)
+ self.add(operation)
+ return operation
+
+ def update_entity(self, project_name, entity_type, entity_id, update_data):
+ """Fast access to 'UpdateOperation'.
+
+ Returns:
+ UpdateOperation: Object of update operation.
+ """
+
+ operation = UpdateOperation(
+ project_name, entity_type, entity_id, update_data
+ )
+ self.add(operation)
+ return operation
+
+ def delete_entity(self, project_name, entity_type, entity_id):
+ """Fast access to 'DeleteOperation'.
+
+ Returns:
+ DeleteOperation: Object of delete operation.
+ """
+
+ operation = DeleteOperation(project_name, entity_type, entity_id)
+ self.add(operation)
+ return operation
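Since 'commit' is abstract, backends subclass 'BaseOperationsSession'. A toy in-memory subclass, purely illustrative:

```python
class InMemoryOperationsSession(BaseOperationsSession):
    """Illustrative session that only records committed operations."""

    def __init__(self):
        super(InMemoryOperationsSession, self).__init__()
        self.committed = []

    def commit(self):
        # Pop the queued operations and keep their serialized form.
        operations, self._operations = self._operations, []
        self.committed.extend(op.to_data() for op in operations)


session = InMemoryOperationsSession()
session.delete_entity("my_project", "asset", "some-entity-id")
session.commit()
assert session.committed[0]["operation"] == "delete"
```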
diff --git a/openpype/hosts/celaction/hooks/__init__.py b/openpype/client/server/__init__.py
similarity index 100%
rename from openpype/hosts/celaction/hooks/__init__.py
rename to openpype/client/server/__init__.py
diff --git a/openpype/client/server/constants.py b/openpype/client/server/constants.py
new file mode 100644
index 0000000000..1d3f94c702
--- /dev/null
+++ b/openpype/client/server/constants.py
@@ -0,0 +1,18 @@
+# --- Folders ---
+DEFAULT_FOLDER_FIELDS = {
+ "id",
+ "name",
+ "path",
+ "parentId",
+ "active",
+ "parents",
+ "thumbnailId"
+}
+
+REPRESENTATION_FILES_FIELDS = {
+ "files.name",
+ "files.hash",
+ "files.id",
+ "files.path",
+ "files.size",
+}
diff --git a/openpype/client/server/conversion_utils.py b/openpype/client/server/conversion_utils.py
new file mode 100644
index 0000000000..8c18cb1c13
--- /dev/null
+++ b/openpype/client/server/conversion_utils.py
@@ -0,0 +1,1339 @@
+import os
+import arrow
+import collections
+import json
+
+import six
+
+from openpype.client.operations_base import REMOVED_VALUE
+from openpype.client.mongo.operations import (
+ CURRENT_PROJECT_SCHEMA,
+ CURRENT_ASSET_DOC_SCHEMA,
+ CURRENT_SUBSET_SCHEMA,
+ CURRENT_VERSION_SCHEMA,
+ CURRENT_HERO_VERSION_SCHEMA,
+ CURRENT_REPRESENTATION_SCHEMA,
+ CURRENT_WORKFILE_INFO_SCHEMA,
+)
+from .constants import REPRESENTATION_FILES_FIELDS
+from .utils import create_entity_id, prepare_entity_changes
+
+# --- Project entity ---
+PROJECT_FIELDS_MAPPING_V3_V4 = {
+ "_id": {"name"},
+ "name": {"name"},
+ "data": {"data", "code"},
+ "data.library_project": {"library"},
+ "data.code": {"code"},
+ "data.active": {"active"},
+}
+
+# TODO this should not be hardcoded but received from server!!!
+# --- Folder entity ---
+FOLDER_FIELDS_MAPPING_V3_V4 = {
+ "_id": {"id"},
+ "name": {"name"},
+ "label": {"label"},
+ "data": {
+ "parentId", "parents", "active", "tasks", "thumbnailId"
+ },
+ "data.visualParent": {"parentId"},
+ "data.parents": {"parents"},
+ "data.active": {"active"},
+ "data.thumbnail_id": {"thumbnailId"},
+ "data.entityType": {"folderType"}
+}
+
+# --- Subset entity ---
+SUBSET_FIELDS_MAPPING_V3_V4 = {
+ "_id": {"id"},
+ "name": {"name"},
+ "data.active": {"active"},
+ "parent": {"folderId"}
+}
+
+# --- Version entity ---
+VERSION_FIELDS_MAPPING_V3_V4 = {
+ "_id": {"id"},
+ "name": {"version"},
+ "parent": {"productId"}
+}
+
+# --- Representation entity ---
+REPRESENTATION_FIELDS_MAPPING_V3_V4 = {
+ "_id": {"id"},
+ "name": {"name"},
+ "parent": {"versionId"},
+ "context": {"context"},
+ "files": {"files"},
+}
+
+
+def project_fields_v3_to_v4(fields, con):
+ """Convert project fields from v3 to v4 structure.
+
+ Args:
+ fields (Union[Iterable(str), None]): fields to be converted.
+
+ Returns:
+ Union[Set(str), None]: Converted fields to v4 fields.
+ """
+
+ # TODO config fields
+ # - config.apps
+ # - config.groups
+ if not fields:
+ return None
+
+ project_attribs = con.get_attributes_for_type("project")
+ output = set()
+ for field in fields:
+ # If config is needed the rest api call must be used
+ if field.startswith("config"):
+ return None
+
+ if field in PROJECT_FIELDS_MAPPING_V3_V4:
+ output |= PROJECT_FIELDS_MAPPING_V3_V4[field]
+ if field == "data":
+ output |= {
+ "attrib.{}".format(attr)
+ for attr in project_attribs
+ }
+
+ elif field.startswith("data"):
+ field_parts = field.split(".")
+ field_parts.pop(0)
+ data_key = ".".join(field_parts)
+ if data_key in project_attribs:
+ output.add("attrib.{}".format(data_key))
+ else:
+ output.add("data")
+ print("Requested specific key from data {}".format(data_key))
+
+ else:
+ raise ValueError("Unknown field mapping for {}".format(field))
+
+ if "name" not in output:
+ output.add("name")
+ return output
+
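A sketch of the conversion, assuming a connection object 'con' whose reported project attributes include "fps" (the attribute set really comes from the server):

```python
fields = project_fields_v3_to_v4({"name", "data.fps"}, con)
# -> {"name", "attrib.fps"}

# Any "config.*" field cannot be expressed as GraphQL fields here,
# so the function returns None to force a full REST query instead.
assert project_fields_v3_to_v4({"config.templates"}, con) is None
```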
+
+def _get_default_template_name(templates):
+ default_template = None
+ for name, template in templates.items():
+ if name == "default":
+ return "default"
+
+ if default_template is None:
+ default_template = name
+
+ return default_template
+
+
+def _template_replacements_to_v3(template):
+ return (
+ template
+ .replace("{product[name]}", "{subset}")
+ .replace("{product[type]}", "{family}")
+ )
+
+
+def _convert_template_item(template):
+ # Others won't have 'directory'
+ if "directory" not in template:
+ return
+ folder = _template_replacements_to_v3(template.pop("directory"))
+ template["folder"] = folder
+ template["file"] = _template_replacements_to_v3(template["file"])
+ template["path"] = "/".join(
+ (folder, template["file"])
+ )
+
+
+def _fill_template_category(templates, cat_templates, cat_key):
+ default_template_name = _get_default_template_name(cat_templates)
+ for template_name, cat_template in cat_templates.items():
+ _convert_template_item(cat_template)
+ if template_name == default_template_name:
+ templates[cat_key] = cat_template
+ else:
+ new_name = "{}_{}".format(cat_key, template_name)
+ templates["others"][new_name] = cat_template
+
+
+def convert_v4_project_to_v3(project):
+ """Convert Project entity data from v4 structure to v3 structure.
+
+ Args:
+ project (Dict[str, Any]): Project entity queried from v4 server.
+
+ Returns:
+ Dict[str, Any]: Project converted to v3 structure.
+ """
+
+ if not project:
+ return project
+
+ project_name = project["name"]
+ output = {
+ "_id": project_name,
+ "name": project_name,
+ "schema": CURRENT_PROJECT_SCHEMA,
+ "type": "project"
+ }
+
+ data = project.get("data") or {}
+ attribs = project.get("attrib") or {}
+ apps_attr = attribs.pop("applications", None) or []
+ applications = [
+ {"name": app_name}
+ for app_name in apps_attr
+ ]
+ data.update(attribs)
+ if "tools" in data:
+ data["tools_env"] = data.pop("tools")
+
+ data["entityType"] = "Project"
+
+ config = {}
+ project_config = project.get("config")
+
+ if project_config:
+ config["apps"] = applications
+ config["roots"] = project_config["roots"]
+
+ templates = project_config["templates"]
+ templates["defaults"] = templates.pop("common", None) or {}
+
+ others_templates = templates.pop("others", None) or {}
+ new_others_templates = {}
+ templates["others"] = new_others_templates
+ for name, template in others_templates.items():
+ _convert_template_item(template)
+ new_others_templates[name] = template
+
+ for key in (
+ "work",
+ "publish",
+ "hero"
+ ):
+ cat_templates = templates.pop(key)
+ _fill_template_category(templates, cat_templates, key)
+
+ delivery_templates = templates.pop("delivery", None) or {}
+ new_delivery_templates = {}
+ for name, delivery_template in delivery_templates.items():
+ new_delivery_templates[name] = "/".join(
+ (delivery_template["directory"], delivery_template["file"])
+ )
+ templates["delivery"] = new_delivery_templates
+
+ config["templates"] = templates
+
+ if "taskTypes" in project:
+ task_types = project["taskTypes"]
+ new_task_types = {}
+ for task_type in task_types:
+ name = task_type.pop("name")
+ # Change 'shortName' to 'short_name'
+ task_type["short_name"] = task_type.pop("shortName", None)
+ new_task_types[name] = task_type
+
+ config["tasks"] = new_task_types
+
+ if config:
+ output["config"] = config
+
+ for data_key, key in (
+ ("library_project", "library"),
+ ("code", "code"),
+ ("active", "active")
+ ):
+ if key in project:
+ data[data_key] = project[key]
+
+ if "attrib" in project:
+ for key, value in project["attrib"].items():
+ data[key] = value
+
+ if data:
+ output["data"] = data
+ return output
+
+
+def folder_fields_v3_to_v4(fields, con):
+ """Convert folder fields from v3 to v4 structure.
+
+ Args:
+ fields (Union[Iterable(str), None]): fields to be converted.
+
+ Returns:
+ Union[Set(str), None]: Converted fields to v4 fields.
+ """
+
+ if not fields:
+ return None
+
+ folder_attributes = con.get_attributes_for_type("folder")
+ output = set()
+ for field in fields:
+ if field in ("schema", "type", "parent"):
+ continue
+
+ if field in FOLDER_FIELDS_MAPPING_V3_V4:
+ output |= FOLDER_FIELDS_MAPPING_V3_V4[field]
+ if field == "data":
+ output |= {
+ "attrib.{}".format(attr)
+ for attr in folder_attributes
+ }
+
+ elif field.startswith("data"):
+ field_parts = field.split(".")
+ field_parts.pop(0)
+ data_key = ".".join(field_parts)
+ if data_key == "label":
+ output.add("name")
+
+ elif data_key in ("icon", "color"):
+ continue
+
+ elif data_key.startswith("tasks"):
+ output.add("tasks")
+
+ elif data_key in folder_attributes:
+ output.add("attrib.{}".format(data_key))
+
+ else:
+ output.add("data")
+ print("Requested specific key from data {}".format(data_key))
+
+ else:
+ raise ValueError("Unknown field mapping for {}".format(field))
+
+ if "id" not in output:
+ output.add("id")
+ return output
+
+
+def convert_v4_tasks_to_v3(tasks):
+ """Convert v4 task item to v3 task.
+
+ Args:
+ tasks (List[Dict[str, Any]]): Task entities.
+
+ Returns:
+ Dict[str, Dict[str, Any]]: Tasks in v3 variant ready for v3 asset.
+ """
+
+ output = {}
+ for task in tasks:
+ task_name = task["name"]
+ new_task = {
+ "type": task["taskType"]
+ }
+ output[task_name] = new_task
+ return output
+
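For example (hypothetical task types):

```python
tasks_v4 = [
    {"name": "modeling", "taskType": "Modeling"},
    {"name": "rigging", "taskType": "Rigging"},
]
convert_v4_tasks_to_v3(tasks_v4)
# -> {"modeling": {"type": "Modeling"}, "rigging": {"type": "Rigging"}}
```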
+
+def convert_v4_folder_to_v3(folder, project_name):
+ """Convert v4 folder to v3 asset.
+
+ Args:
+ folder (Dict[str, Any]): Folder entity data.
+ project_name (str): Project name from which folder was queried.
+
+ Returns:
+ Dict[str, Any]: Converted v4 folder to v3 asset.
+ """
+
+ output = {
+ "_id": folder["id"],
+ "parent": project_name,
+ "type": "asset",
+ "schema": CURRENT_ASSET_DOC_SCHEMA
+ }
+
+ output_data = folder.get("data") or {}
+
+ if "name" in folder:
+ output["name"] = folder["name"]
+ output_data["label"] = folder["name"]
+
+ if "folderType" in folder:
+ output_data["entityType"] = folder["folderType"]
+
+ for src_key, dst_key in (
+ ("parentId", "visualParent"),
+ ("active", "active"),
+ ("thumbnailId", "thumbnail_id"),
+ ("parents", "parents"),
+ ):
+ if src_key in folder:
+ output_data[dst_key] = folder[src_key]
+
+ if "attrib" in folder:
+ output_data.update(folder["attrib"])
+
+ if "tools" in output_data:
+ output_data["tools_env"] = output_data.pop("tools")
+
+ if "tasks" in folder:
+ output_data["tasks"] = convert_v4_tasks_to_v3(folder["tasks"])
+
+ output["data"] = output_data
+
+ return output
+
+
+def subset_fields_v3_to_v4(fields, con):
+ """Convert subset fields from v3 to v4 structure.
+
+ Args:
+ fields (Union[Iterable(str), None]): fields to be converted.
+
+ Returns:
+ Union[Set(str), None]: Converted fields to v4 fields.
+ """
+
+ if not fields:
+ return None
+
+ product_attributes = con.get_attributes_for_type("product")
+
+ output = set()
+ for field in fields:
+ if field in ("schema", "type"):
+ continue
+
+ if field in SUBSET_FIELDS_MAPPING_V3_V4:
+ output |= SUBSET_FIELDS_MAPPING_V3_V4[field]
+
+ elif field == "data":
+ output.add("productType")
+ output.add("active")
+ output |= {
+ "attrib.{}".format(attr)
+ for attr in product_attributes
+ }
+
+ elif field.startswith("data"):
+ field_parts = field.split(".")
+ field_parts.pop(0)
+ data_key = ".".join(field_parts)
+ if data_key in ("family", "families"):
+ output.add("productType")
+
+ elif data_key in product_attributes:
+ output.add("attrib.{}".format(data_key))
+
+ else:
+ output.add("data")
+ print("Requested specific key from data {}".format(data_key))
+
+ else:
+ raise ValueError("Unknown field mapping for {}".format(field))
+
+ if "id" not in output:
+ output.add("id")
+ return output
+
+
+def convert_v4_subset_to_v3(subset):
+ output = {
+ "_id": subset["id"],
+ "type": "subset",
+ "schema": CURRENT_SUBSET_SCHEMA
+ }
+ if "folderId" in subset:
+ output["parent"] = subset["folderId"]
+
+ output_data = subset.get("data") or {}
+
+ if "name" in subset:
+ output["name"] = subset["name"]
+
+ if "active" in subset:
+ output_data["active"] = subset["active"]
+
+ if "attrib" in subset:
+ attrib = subset["attrib"]
+ if "productGroup" in attrib:
+ attrib["subsetGroup"] = attrib.pop("productGroup")
+ output_data.update(attrib)
+
+ family = subset.get("productType")
+ if family:
+ output_data["family"] = family
+ output_data["families"] = [family]
+
+ output["data"] = output_data
+
+ return output
+
+
+def version_fields_v3_to_v4(fields, con):
+ """Convert version fields from v3 to v4 structure.
+
+ Args:
+ fields (Union[Iterable(str), None]): fields to be converted.
+
+ Returns:
+ Union[Set(str), None]: Converted fields to v4 fields.
+ """
+
+ if not fields:
+ return None
+
+ version_attributes = con.get_attributes_for_type("version")
+
+ output = set()
+ for field in fields:
+ if field in ("type", "schema", "version_id"):
+ continue
+
+ if field in VERSION_FIELDS_MAPPING_V3_V4:
+ output |= VERSION_FIELDS_MAPPING_V3_V4[field]
+
+ elif field == "data":
+ output |= {
+ "attrib.{}".format(attr)
+ for attr in version_attributes
+ }
+ output |= {
+ "author",
+ "createdAt",
+ "thumbnailId",
+ }
+
+ elif field.startswith("data"):
+ field_parts = field.split(".")
+ field_parts.pop(0)
+ data_key = ".".join(field_parts)
+ if data_key in version_attributes:
+ output.add("attrib.{}".format(data_key))
+
+ elif data_key == "thumbnail_id":
+ output.add("thumbnailId")
+
+ elif data_key == "time":
+ output.add("createdAt")
+
+ elif data_key == "author":
+ output.add("author")
+
+ elif data_key in ("tags", ):
+ continue
+
+ else:
+ output.add("data")
+ print("Requested specific key from data {}".format(data_key))
+
+ else:
+ raise ValueError("Unknown field mapping for {}".format(field))
+
+ if "id" not in output:
+ output.add("id")
+ return output
+
+
+def convert_v4_version_to_v3(version):
+ """Convert v4 version entity to v4 version.
+
+ Args:
+ version (Dict[str, Any]): Queried v4 version entity.
+
+ Returns:
+ Dict[str, Any]: Converted version entity to v3 structure.
+ """
+
+ version_num = version["version"]
+ if version_num < 0:
+ output = {
+ "_id": version["id"],
+ "type": "hero_version",
+ "schema": CURRENT_HERO_VERSION_SCHEMA,
+ }
+ if "productId" in version:
+ output["parent"] = version["productId"]
+
+ if "data" in version:
+ output["data"] = version["data"]
+ return output
+
+ output = {
+ "_id": version["id"],
+ "type": "version",
+ "name": version_num,
+ "schema": CURRENT_VERSION_SCHEMA
+ }
+ if "productId" in version:
+ output["parent"] = version["productId"]
+
+ output_data = version.get("data") or {}
+ if "attrib" in version:
+ output_data.update(version["attrib"])
+
+ for src_key, dst_key in (
+ ("active", "active"),
+ ("thumbnailId", "thumbnail_id"),
+ ("author", "author")
+ ):
+ if src_key in version:
+ output_data[dst_key] = version[src_key]
+
+ if "createdAt" in version:
+ created_at = arrow.get(version["createdAt"])
+ output_data["time"] = created_at.strftime("%Y%m%dT%H%M%SZ")
+
+ output["data"] = output_data
+
+ return output
+
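Negative version numbers mark hero versions in v4, which is what the converter branches on. A sketch with hypothetical ids:

```python
hero = convert_v4_version_to_v3(
    {"id": "v-id", "version": -3, "productId": "p-id"}
)
assert hero["type"] == "hero_version"

regular = convert_v4_version_to_v3(
    {"id": "v-id", "version": 3, "productId": "p-id", "attrib": {"fps": 25}}
)
assert regular["type"] == "version"
assert regular["data"]["fps"] == 25
```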
+
+def representation_fields_v3_to_v4(fields, con):
+ """Convert representation fields from v3 to v4 structure.
+
+ Args:
+ fields (Union[Iterable(str), None]): fields to be converted.
+
+ Returns:
+ Union[Set(str), None]: Converted fields to v4 fields.
+ """
+
+ if not fields:
+ return None
+
+ representation_attributes = con.get_attributes_for_type("representation")
+
+ output = set()
+ for field in fields:
+ if field in ("type", "schema"):
+ continue
+
+ if field in REPRESENTATION_FIELDS_MAPPING_V3_V4:
+ output |= REPRESENTATION_FIELDS_MAPPING_V3_V4[field]
+
+ elif field.startswith("context"):
+ output.add("context")
+
+ # TODO: 'files' can have specific attributes but the keys in v3 and v4
+ # are not the same (content is not the same)
+ elif field.startswith("files"):
+ output |= REPRESENTATION_FILES_FIELDS
+
+ elif field.startswith("data"):
+ output |= {
+ "attrib.{}".format(attr)
+ for attr in representation_attributes
+ }
+
+ else:
+ raise ValueError("Unknown field mapping for {}".format(field))
+
+ if "id" not in output:
+ output.add("id")
+ return output
+
+
+def convert_v4_representation_to_v3(representation):
+ """Convert v4 representation to v3 representation.
+
+ Args:
+ representation (Dict[str, Any]): Queried representation from v4 server.
+
+ Returns:
+ Dict[str, Any]: Converted representation to v3 structure.
+ """
+
+ output = {
+ "type": "representation",
+ "schema": CURRENT_REPRESENTATION_SCHEMA,
+ }
+ if "id" in representation:
+ output["_id"] = representation["id"]
+
+ for v3_key, v4_key in (
+ ("name", "name"),
+ ("parent", "versionId")
+ ):
+ if v4_key in representation:
+ output[v3_key] = representation[v4_key]
+
+ if "context" in representation:
+ context = representation["context"]
+ if isinstance(context, six.string_types):
+ context = json.loads(context)
+
+ if "asset" not in context and "folder" in context:
+ _c_folder = context["folder"]
+ context["asset"] = _c_folder["name"]
+
+ elif "asset" in context and "folder" not in context:
+ context["folder"] = {"name": context["asset"]}
+
+ if "product" in context:
+ _c_product = context.pop("product")
+ context["family"] = _c_product["type"]
+ context["subset"] = _c_product["name"]
+
+ output["context"] = context
+
+ if "files" in representation:
+ files = representation["files"]
+ new_files = []
+ # From GraphQl the value is a list
+ if isinstance(files, list):
+ for file_info in files:
+ file_info["_id"] = file_info["id"]
+ new_files.append(file_info)
+
+ # From the REST endpoint the value is a dictionary
+ elif isinstance(files, dict):
+ for file_id, file_info in files.items():
+ file_info["_id"] = file_id
+ new_files.append(file_info)
+
+ for file_info in new_files:
+ if not file_info.get("sites"):
+ file_info["sites"] = [{
+ "name": "studio"
+ }]
+
+ output["files"] = new_files
+
+ if representation.get("active") is False:
+ output["type"] = "archived_representation"
+ output["old_id"] = output["_id"]
+
+ output_data = representation.get("data") or {}
+ if "attrib" in representation:
+ output_data.update(representation["attrib"])
+
+ for key, data_key in (
+ ("active", "active"),
+ ):
+ if key in representation:
+ output_data[data_key] = representation[key]
+
+ if "template" in output_data:
+ output_data["template"] = (
+ output_data["template"]
+ .replace("{product[name]}", "{subset}")
+ .replace("{product[type]}", "{family}")
+ )
+
+ output["data"] = output_data
+
+ return output
+
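A sketch of the context conversion (hypothetical ids); the v4 "folder"/"product" keys are expanded into the v3 "asset"/"subset"/"family" keys:

```python
rep_v3 = convert_v4_representation_to_v3({
    "id": "r-id",
    "name": "exr",
    "versionId": "v-id",
    "context": {
        "folder": {"name": "sh010"},
        "product": {"name": "renderMain", "type": "render"},
    },
})
assert rep_v3["context"]["asset"] == "sh010"
assert rep_v3["context"]["subset"] == "renderMain"
assert rep_v3["context"]["family"] == "render"
```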
+
+def workfile_info_fields_v3_to_v4(fields):
+ if not fields:
+ return None
+
+ new_fields = set()
+ fields = set(fields)
+ for v3_key, v4_key in (
+ ("_id", "id"),
+ ("files", "path"),
+ ("filename", "name"),
+ ("data", "data"),
+ ):
+ if v3_key in fields:
+ new_fields.add(v4_key)
+
+ if "parent" in fields or "task_name" in fields:
+ new_fields.add("taskId")
+
+ return new_fields
+
+
+def convert_v4_workfile_info_to_v3(workfile_info, task):
+ output = {
+ "type": "workfile",
+ "schema": CURRENT_WORKFILE_INFO_SCHEMA,
+ }
+ if "id" in workfile_info:
+ output["_id"] = workfile_info["id"]
+
+ if "path" in workfile_info:
+ output["files"] = [workfile_info["path"]]
+
+ if "name" in workfile_info:
+ output["filename"] = workfile_info["name"]
+
+ if "taskId" in workfile_info:
+ output["task_name"] = task["name"]
+ output["parent"] = task["folderId"]
+
+ return output
+
+
+def convert_create_asset_to_v4(asset, project, con):
+ folder_attributes = con.get_attributes_for_type("folder")
+
+ asset_data = asset["data"]
+ parent_id = asset_data["visualParent"]
+
+ folder = {
+ "name": asset["name"],
+ "parentId": parent_id,
+ }
+ entity_id = asset.get("_id")
+ if entity_id:
+ folder["id"] = entity_id
+
+ attribs = {}
+ data = {}
+ for key, value in asset_data.items():
+ if key in (
+ "visualParent",
+ "thumbnail_id",
+ "parents",
+ "inputLinks",
+ "avalon_mongo_id",
+ ):
+ continue
+
+ if key not in folder_attributes:
+ data[key] = value
+ elif value is not None:
+ attribs[key] = value
+
+ if attribs:
+ folder["attrib"] = attribs
+
+ if data:
+ folder["data"] = data
+ return folder
+
+
+def convert_create_task_to_v4(task, project, con):
+ if not project["taskTypes"]:
+ raise ValueError(
+ "Project \"{}\" does not have any task types".format(
+ project["name"]))
+
+ task_type = task["type"]
+ if task_type not in project["taskTypes"]:
+ task_type = tuple(project["taskTypes"].keys())[0]
+
+ return {
+ "name": task["name"],
+ "taskType": task_type,
+ "folderId": task["folderId"]
+ }
+
+
+def convert_create_subset_to_v4(subset, con):
+ product_attributes = con.get_attributes_for_type("product")
+
+ subset_data = subset["data"]
+ product_type = subset_data.get("family")
+ if not product_type:
+ product_type = subset_data["families"][0]
+
+ converted_product = {
+ "name": subset["name"],
+ "productType": product_type,
+ "folderId": subset["parent"],
+ }
+ entity_id = subset.get("_id")
+ if entity_id:
+ converted_product["id"] = entity_id
+
+ attribs = {}
+ data = {}
+ if "subsetGroup" in subset_data:
+ subset_data["productGroup"] = subset_data.pop("subsetGroup")
+ for key, value in subset_data.items():
+ if key not in product_attributes:
+ data[key] = value
+ elif value is not None:
+ attribs[key] = value
+
+ if attribs:
+ converted_product["attrib"] = attribs
+
+ if data:
+ converted_product["data"] = data
+
+ return converted_product
+
+
+def convert_create_version_to_v4(version, con):
+ version_attributes = con.get_attributes_for_type("version")
+ converted_version = {
+ "version": version["name"],
+ "productId": version["parent"],
+ }
+ entity_id = version.get("_id")
+ if entity_id:
+ converted_version["id"] = entity_id
+
+ version_data = version["data"]
+ attribs = {}
+ data = {}
+ for key, value in version_data.items():
+ if key not in version_attributes:
+ data[key] = value
+ elif value is not None:
+ attribs[key] = value
+
+ if attribs:
+ converted_version["attrib"] = attribs
+
+ if data:
+ converted_version["data"] = attribs
+
+ return converted_version
+
+
+def convert_create_hero_version_to_v4(hero_version, project_name, con):
+ if "version_id" in hero_version:
+ version_id = hero_version["version_id"]
+ version = con.get_version_by_id(project_name, version_id)
+ version["version"] = - version["version"]
+
+ for auto_key in (
+ "name",
+ "createdAt",
+ "updatedAt",
+ "author",
+ ):
+ version.pop(auto_key, None)
+
+ return version
+
+ version_attributes = con.get_attributes_for_type("version")
+ converted_version = {
+ "version": hero_version["version"],
+ "productId": hero_version["parent"],
+ }
+ entity_id = hero_version.get("_id")
+ if entity_id:
+ converted_version["id"] = entity_id
+
+ version_data = hero_version["data"]
+ attribs = {}
+ data = {}
+ for key, value in version_data.items():
+ if key not in version_attributes:
+ data[key] = value
+ elif value is not None:
+ attribs[key] = value
+
+ if attribs:
+ converted_version["attrib"] = attribs
+
+ if data:
+ converted_version["data"] = attribs
+
+ return converted_version
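+
+# Note on the conversion above: the AYON server has no dedicated hero version
+# entity, so hero versions are stored as regular versions with a negative
+# version number. A hero version created from a source version with
+# 'version_id' reuses the source version data and only negates its number,
+# e.g. a hero copy of version 3 is sent as {"version": -3, ...}
+# (hypothetical values).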
+
+
+def convert_create_representation_to_v4(representation, con):
+ representation_attributes = con.get_attributes_for_type("representation")
+
+ converted_representation = {
+ "name": representation["name"],
+ "versionId": representation["parent"],
+ }
+ entity_id = representation.get("_id")
+ if entity_id:
+ converted_representation["id"] = entity_id
+
+ if representation.get("type") == "archived_representation":
+ converted_representation["active"] = False
+
+ new_files = []
+ for file_item in representation["files"]:
+ new_file_item = {
+ key: value
+ for key, value in file_item.items()
+ if key in ("hash", "path", "size")
+ }
+ new_file_item.update({
+ "id": create_entity_id(),
+ "hash_type": "op3",
+ "name": os.path.basename(new_file_item["path"])
+ })
+ new_files.append(new_file_item)
+
+ converted_representation["files"] = new_files
+
+ context = representation["context"]
+ if "folder" not in context:
+ context["folder"] = {
+ "name": context.get("asset")
+ }
+
+ context["product"] = {
+ "type": context.pop("family", None),
+ "name": context.pop("subset", None),
+ }
+
+ attribs = {}
+ data = {
+ "context": context,
+ }
+
+ representation_data = representation["data"]
+ representation_data["template"] = (
+ representation_data["template"]
+ .replace("{subset}", "{product[name]}")
+ .replace("{family}", "{product[type]}")
+ )
+
+ for key, value in representation_data.items():
+ if key not in representation_attributes:
+ data[key] = value
+ elif value is not None:
+ attribs[key] = value
+
+ if attribs:
+ converted_representation["attrib"] = attribs
+
+ if data:
+ converted_representation["data"] = data
+
+ return converted_representation
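+
+# Sketch of the file item conversion above, with hypothetical values: a v3
+# file item like
+#   {"hash": "...", "path": "{root[work]}/sh010.exr", "size": 1024,
+#    "sites": [{"name": "studio"}]}
+# keeps only 'hash', 'path' and 'size', drops 'sites', and gains a fresh
+# 'id', "hash_type": "op3" and "name": "sh010.exr".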
+
+
+def convert_create_workfile_info_to_v4(data, project_name, con):
+ folder_id = data["parent"]
+ task_name = data["task_name"]
+ task = con.get_task_by_name(project_name, folder_id, task_name)
+ if not task:
+ return None
+
+ workfile_attributes = con.get_attributes_for_type("workfile")
+ filename = data["filename"]
+ possible_attribs = {
+ "extension": os.path.splitext(filename)[-1]
+ }
+ attribs = {}
+ for attr in workfile_attributes:
+ if attr in possible_attribs:
+ attribs[attr] = possible_attribs[attr]
+
+ output = {
+ "path": data["files"][0],
+ "name": filename,
+ "taskId": task["id"]
+ }
+ if "_id" in data:
+ output["id"] = data["_id"]
+
+ if attribs:
+ output["attrib"] = attribs
+
+ output_data = data.get("data")
+ if output_data:
+ output["data"] = output_data
+ return output
+
+
+def _from_flat_dict(data):
+ output = {}
+ for key, value in data.items():
+ output_value = output
+ subkeys = key.split(".")
+ last_key = subkeys.pop(-1)
+ for subkey in subkeys:
+ if subkey not in output_value:
+ output_value[subkey] = {}
+ output_value = output_value[subkey]
+
+ output_value[last_key] = value
+ return output
+
+
+def _to_flat_dict(data):
+ output = {}
+ flat_queue = collections.deque()
+ flat_queue.append(([], data))
+ while flat_queue:
+ item = flat_queue.popleft()
+ parent_keys, data = item
+ for key, value in data.items():
+ keys = list(parent_keys)
+ keys.append(key)
+ if isinstance(value, dict):
+ flat_queue.append((keys, value))
+ else:
+ full_key = ".".join(keys)
+ output[full_key] = value
+
+ return output
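+
+# The two helpers above round-trip between dotted keys and nested
+# dictionaries, e.g. (hypothetical values):
+#
+#   >>> _from_flat_dict({"data.fps": 25, "name": "sh010"})
+#   {'data': {'fps': 25}, 'name': 'sh010'}
+#   >>> _to_flat_dict({"data": {"fps": 25}, "name": "sh010"})
+#   {'data.fps': 25, 'name': 'sh010'}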
+
+
+def convert_update_folder_to_v4(project_name, asset_id, update_data, con):
+ new_update_data = {}
+
+ folder_attributes = con.get_attributes_for_type("folder")
+ full_update_data = _from_flat_dict(update_data)
+ data = full_update_data.get("data")
+
+ has_new_parent = False
+ has_task_changes = False
+ parent_id = None
+ tasks = None
+ new_data = {}
+ attribs = full_update_data.pop("attrib", {})
+ if "type" in update_data:
+ new_update_data["active"] = update_data["type"] == "asset"
+
+ if data:
+ if "thumbnail_id" in data:
+ new_update_data["thumbnailId"] = data.pop("thumbnail_id")
+
+ if "tasks" in data:
+ tasks = data.pop("tasks")
+ has_task_changes = True
+
+ if "visualParent" in data:
+ has_new_parent = True
+ parent_id = data.pop("visualParent")
+
+ for key, value in data.items():
+ if key in folder_attributes:
+ attribs[key] = value
+ else:
+ new_data[key] = value
+
+ if "name" in update_data:
+ new_update_data["name"] = update_data["name"]
+
+ if "type" in update_data:
+ new_type = update_data["type"]
+ if new_type == "asset":
+ new_update_data["active"] = True
+ elif new_type == "archived_asset":
+ new_update_data["active"] = False
+
+ if has_new_parent:
+ new_update_data["parentId"] = parent_id
+
+ if new_data:
+ print("Folder has new data: {}".format(new_data))
+ new_update_data["data"] = new_data
+
+ if attribs:
+ new_update_data["attrib"] = attribs
+
+ if has_task_changes:
+ raise ValueError("Task changes of folder are not implemented")
+
+ return _to_flat_dict(new_update_data)
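+
+# A sketch with hypothetical values ('con' being a server api connection):
+# a v3 update changing name and visual parent becomes a flat v4 update.
+#
+#   >>> convert_update_folder_to_v4(
+#   ...     "demo_project",
+#   ...     "folder-id",
+#   ...     {"name": "sh020", "data.visualParent": "parent-id"},
+#   ...     con,
+#   ... )
+#   {'name': 'sh020', 'parentId': 'parent-id'}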
+
+
+def convert_update_subset_to_v4(project_name, subset_id, update_data, con):
+ new_update_data = {}
+
+ product_attributes = con.get_attributes_for_type("product")
+ full_update_data = _from_flat_dict(update_data)
+ data = full_update_data.get("data")
+ new_data = {}
+ attribs = full_update_data.pop("attrib", {})
+ if data:
+ if "family" in data:
+ family = data.pop("family")
+ new_update_data["productType"] = family
+
+ if "families" in data:
+ families = data.pop("families")
+ if "productType" not in new_update_data:
+ new_update_data["productType"] = families[0]
+
+ if "subsetGroup" in data:
+ data["productGroup"] = data.pop("subsetGroup")
+ for key, value in data.items():
+ if key in product_attributes:
+ if value is REMOVED_VALUE:
+ value = None
+ attribs[key] = value
+
+ elif value is not REMOVED_VALUE:
+ new_data[key] = value
+
+ if "name" in update_data:
+ new_update_data["name"] = update_data["name"]
+
+ if "type" in update_data:
+ new_type = update_data["type"]
+ if new_type == "subset":
+ new_update_data["active"] = True
+ elif new_type == "archived_subset":
+ new_update_data["active"] = False
+
+ if "parent" in update_data:
+ new_update_data["folderId"] = update_data["parent"]
+
+ flat_data = _to_flat_dict(new_update_data)
+ if attribs:
+ flat_data["attrib"] = attribs
+
+ if new_data:
+ print("Subset has new data: {}".format(new_data))
+ flat_data["data"] = new_data
+
+ return flat_data
+
+
+def convert_update_version_to_v4(project_name, version_id, update_data, con):
+ new_update_data = {}
+
+ version_attributes = con.get_attributes_for_type("version")
+ full_update_data = _from_flat_dict(update_data)
+ data = full_update_data.get("data")
+ new_data = {}
+ attribs = full_update_data.pop("attrib", {})
+ if data:
+ if "author" in data:
+ new_update_data["author"] = data.pop("author")
+
+ if "thumbnail_id" in data:
+ new_update_data["thumbnailId"] = data.pop("thumbnail_id")
+
+ for key, value in data.items():
+ if key in version_attributes:
+ if value is REMOVED_VALUE:
+ value = None
+ attribs[key] = value
+
+ elif value is not REMOVED_VALUE:
+ new_data[key] = value
+
+ if "name" in update_data:
+ new_update_data["version"] = update_data["name"]
+
+ if "type" in update_data:
+ new_type = update_data["type"]
+ if new_type == "version":
+ new_update_data["active"] = True
+ elif new_type == "archived_version":
+ new_update_data["active"] = False
+
+ if "parent" in update_data:
+ new_update_data["productId"] = update_data["parent"]
+
+ flat_data = _to_flat_dict(new_update_data)
+ if attribs:
+ flat_data["attrib"] = attribs
+
+ if new_data:
+ print("Version has new data: {}".format(new_data))
+ flat_data["data"] = new_data
+ return flat_data
+
+
+def convert_update_hero_version_to_v4(
+ project_name, hero_version_id, update_data, con
+):
+ if "version_id" not in update_data:
+ return None
+
+ version_id = update_data["version_id"]
+ hero_version = con.get_hero_version_by_id(project_name, hero_version_id)
+ version = con.get_version_by_id(project_name, version_id)
+ version["version"] = - version["version"]
+ version["id"] = hero_version_id
+
+ for auto_key in (
+ "name",
+ "createdAt",
+ "updatedAt",
+ "author",
+ ):
+ version.pop(auto_key, None)
+
+ return prepare_entity_changes(hero_version, version)
+
+
+def convert_update_representation_to_v4(
+ project_name, repre_id, update_data, con
+):
+ new_update_data = {}
+
+ folder_attributes = con.get_attributes_for_type("folder")
+ full_update_data = _from_flat_dict(update_data)
+ data = full_update_data.get("data")
+
+ new_data = {}
+ attribs = full_update_data.pop("attrib", {})
+ if data:
+ for key, value in data.items():
+            if key in representation_attributes:
+ attribs[key] = value
+ else:
+ new_data[key] = value
+
+ if "template" in attribs:
+ attribs["template"] = (
+ attribs["template"]
+ .replace("{family}", "{product[type]}")
+ .replace("{subset}", "{product[name]}")
+ )
+
+ if "name" in update_data:
+ new_update_data["name"] = update_data["name"]
+
+ if "type" in update_data:
+ new_type = update_data["type"]
+ if new_type == "representation":
+ new_update_data["active"] = True
+ elif new_type == "archived_representation":
+ new_update_data["active"] = False
+
+ if "parent" in update_data:
+ new_update_data["versionId"] = update_data["parent"]
+
+ if "context" in update_data:
+ context = update_data["context"]
+ if "folder" not in context and "asset" in context:
+ context["folder"] = {"name": context.pop("asset")}
+
+ if "family" in context or "subset" in context:
+ context["product"] = {
+ "name": context.pop("subset"),
+ "type": context.pop("family"),
+ }
+ new_data["context"] = context
+
+ if "files" in update_data:
+ new_files = update_data["files"]
+ if isinstance(new_files, dict):
+ new_files = list(new_files.values())
+
+ for item in new_files:
+ for key in tuple(item.keys()):
+ if key not in ("hash", "path", "size"):
+ item.pop(key)
+ item.update({
+ "id": create_entity_id(),
+ "name": os.path.basename(item["path"]),
+ "hash_type": "op3",
+ })
+ new_update_data["files"] = new_files
+
+ flat_data = _to_flat_dict(new_update_data)
+ if attribs:
+ flat_data["attrib"] = attribs
+
+ if new_data:
+ print("Representation has new data: {}".format(new_data))
+ flat_data["data"] = new_data
+
+ return flat_data
+
+
+def convert_update_workfile_info_to_v4(
+ project_name, workfile_id, update_data, con
+):
+ return {
+ key: value
+ for key, value in update_data.items()
+ if key.startswith("data")
+ }
diff --git a/openpype/client/server/entities.py b/openpype/client/server/entities.py
new file mode 100644
index 0000000000..16223d3d91
--- /dev/null
+++ b/openpype/client/server/entities.py
@@ -0,0 +1,694 @@
+import collections
+
+from ayon_api import get_server_api_connection
+
+from openpype.client.mongo.operations import CURRENT_THUMBNAIL_SCHEMA
+
+from .openpype_comp import get_folders_with_tasks
+from .conversion_utils import (
+ project_fields_v3_to_v4,
+ convert_v4_project_to_v3,
+
+ folder_fields_v3_to_v4,
+ convert_v4_folder_to_v3,
+
+ subset_fields_v3_to_v4,
+ convert_v4_subset_to_v3,
+
+ version_fields_v3_to_v4,
+ convert_v4_version_to_v3,
+
+ representation_fields_v3_to_v4,
+ convert_v4_representation_to_v3,
+
+ workfile_info_fields_v3_to_v4,
+ convert_v4_workfile_info_to_v3,
+)
+
+
+def get_projects(active=True, inactive=False, library=None, fields=None):
+ if not active and not inactive:
+ return
+
+ if active and inactive:
+ active = None
+ elif active:
+ active = True
+ elif inactive:
+ active = False
+
+ con = get_server_api_connection()
+ fields = project_fields_v3_to_v4(fields, con)
+ for project in con.get_projects(active, library, fields=fields):
+ yield convert_v4_project_to_v3(project)
+
+
+def get_project(project_name, active=True, inactive=False, fields=None):
+ con = get_server_api_connection()
+ fields = project_fields_v3_to_v4(fields, con)
+ return convert_v4_project_to_v3(
+ con.get_project(project_name, fields=fields)
+ )
+
+
+def get_whole_project(*args, **kwargs):
+ raise NotImplementedError("'get_whole_project' not implemented")
+
+
+def _get_subsets(
+ project_name,
+ subset_ids=None,
+ subset_names=None,
+ folder_ids=None,
+ names_by_folder_ids=None,
+ archived=False,
+ fields=None
+):
+ # Convert fields and add minimum required fields
+ con = get_server_api_connection()
+ fields = subset_fields_v3_to_v4(fields, con)
+ if fields is not None:
+ for key in (
+ "id",
+ "active"
+ ):
+ fields.add(key)
+
+ active = True
+ if archived:
+ active = None
+
+ for subset in con.get_products(
+ project_name,
+ subset_ids,
+ subset_names,
+ folder_ids=folder_ids,
+ names_by_folder_ids=names_by_folder_ids,
+ active=active,
+ fields=fields,
+ ):
+ yield convert_v4_subset_to_v3(subset)
+
+
+def _get_versions(
+ project_name,
+ version_ids=None,
+ subset_ids=None,
+ versions=None,
+ hero=True,
+ standard=True,
+ latest=None,
+ active=None,
+ fields=None
+):
+ con = get_server_api_connection()
+
+ fields = version_fields_v3_to_v4(fields, con)
+
+ # Make sure 'productId' and 'version' are available when hero versions
+ # are queried
+ if fields and hero:
+ fields = set(fields)
+ fields |= {"productId", "version"}
+
+ queried_versions = con.get_versions(
+ project_name,
+ version_ids,
+ subset_ids,
+ versions,
+ hero,
+ standard,
+ latest,
+ active=active,
+ fields=fields
+ )
+
+ versions = []
+ hero_versions = []
+ for version in queried_versions:
+ if version["version"] < 0:
+ hero_versions.append(version)
+ else:
+ versions.append(convert_v4_version_to_v3(version))
+
+ if hero_versions:
+ subset_ids = set()
+ versions_nums = set()
+ for hero_version in hero_versions:
+ versions_nums.add(abs(hero_version["version"]))
+ subset_ids.add(hero_version["productId"])
+
+ hero_eq_versions = con.get_versions(
+ project_name,
+ product_ids=subset_ids,
+ versions=versions_nums,
+ hero=False,
+ fields=["id", "version", "productId"]
+ )
+ hero_eq_by_subset_id = collections.defaultdict(list)
+ for version in hero_eq_versions:
+ hero_eq_by_subset_id[version["productId"]].append(version)
+
+ for hero_version in hero_versions:
+ abs_version = abs(hero_version["version"])
+ subset_id = hero_version["productId"]
+ version_id = None
+ for version in hero_eq_by_subset_id.get(subset_id, []):
+ if version["version"] == abs_version:
+ version_id = version["id"]
+ break
+ conv_hero = convert_v4_version_to_v3(hero_version)
+ conv_hero["version_id"] = version_id
+ versions.append(conv_hero)
+
+ return versions
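+
+# Note on the hero handling above: hero versions come back from the server
+# with a negative 'version' number, e.g. {"version": -3, "productId": ...}
+# for a hero copy of v003 (hypothetical values). The loop pairs each of them
+# with the id of the equivalent standard version and stores it as
+# 'version_id' on the converted document.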
+
+
+def get_asset_by_id(project_name, asset_id, fields=None):
+ assets = get_assets(
+ project_name, asset_ids=[asset_id], fields=fields
+ )
+ for asset in assets:
+ return asset
+ return None
+
+
+def get_asset_by_name(project_name, asset_name, fields=None):
+ assets = get_assets(
+ project_name, asset_names=[asset_name], fields=fields
+ )
+ for asset in assets:
+ return asset
+ return None
+
+
+def get_assets(
+ project_name,
+ asset_ids=None,
+ asset_names=None,
+ parent_ids=None,
+ archived=False,
+ fields=None
+):
+ if not project_name:
+ return
+
+ active = True
+ if archived:
+ active = None
+
+ con = get_server_api_connection()
+ fields = folder_fields_v3_to_v4(fields, con)
+ kwargs = dict(
+ folder_ids=asset_ids,
+ folder_names=asset_names,
+ parent_ids=parent_ids,
+ active=active,
+ fields=fields
+ )
+
+ if fields is None or "tasks" in fields:
+ folders = get_folders_with_tasks(con, project_name, **kwargs)
+
+ else:
+ folders = con.get_folders(project_name, **kwargs)
+
+ for folder in folders:
+ yield convert_v4_folder_to_v3(folder, project_name)
+
+
+def get_archived_assets(
+ project_name,
+ asset_ids=None,
+ asset_names=None,
+ parent_ids=None,
+ fields=None
+):
+ return get_assets(
+ project_name,
+ asset_ids,
+ asset_names,
+ parent_ids,
+ True,
+ fields
+ )
+
+
+def get_asset_ids_with_subsets(project_name, asset_ids=None):
+ con = get_server_api_connection()
+ return con.get_folder_ids_with_products(project_name, asset_ids)
+
+
+def get_subset_by_id(project_name, subset_id, fields=None):
+ subsets = get_subsets(
+ project_name, subset_ids=[subset_id], fields=fields
+ )
+ for subset in subsets:
+ return subset
+ return None
+
+
+def get_subset_by_name(project_name, subset_name, asset_id, fields=None):
+ subsets = get_subsets(
+ project_name,
+ subset_names=[subset_name],
+ asset_ids=[asset_id],
+ fields=fields
+ )
+ for subset in subsets:
+ return subset
+ return None
+
+
+def get_subsets(
+ project_name,
+ subset_ids=None,
+ subset_names=None,
+ asset_ids=None,
+ names_by_asset_ids=None,
+ archived=False,
+ fields=None
+):
+ return _get_subsets(
+ project_name,
+ subset_ids,
+ subset_names,
+ asset_ids,
+ names_by_asset_ids,
+ archived,
+ fields=fields
+ )
+
+
+def get_subset_families(project_name, subset_ids=None):
+ con = get_server_api_connection()
+ return con.get_product_type_names(project_name, subset_ids)
+
+
+def get_version_by_id(project_name, version_id, fields=None):
+ versions = get_versions(
+ project_name,
+ version_ids=[version_id],
+ fields=fields,
+ hero=True
+ )
+ for version in versions:
+ return version
+ return None
+
+
+def get_version_by_name(project_name, version, subset_id, fields=None):
+ versions = get_versions(
+ project_name,
+ subset_ids=[subset_id],
+ versions=[version],
+ fields=fields
+ )
+ for version in versions:
+ return version
+ return None
+
+
+def get_versions(
+ project_name,
+ version_ids=None,
+ subset_ids=None,
+ versions=None,
+ hero=False,
+ fields=None
+):
+ return _get_versions(
+ project_name,
+ version_ids,
+ subset_ids,
+ versions,
+ hero=hero,
+ standard=True,
+ fields=fields
+ )
+
+
+def get_hero_version_by_id(project_name, version_id, fields=None):
+ versions = get_hero_versions(
+ project_name,
+ version_ids=[version_id],
+ fields=fields
+ )
+ for version in versions:
+ return version
+ return None
+
+
+def get_hero_version_by_subset_id(
+ project_name, subset_id, fields=None
+):
+ versions = get_hero_versions(
+ project_name,
+ subset_ids=[subset_id],
+ fields=fields
+ )
+ for version in versions:
+ return version
+ return None
+
+
+def get_hero_versions(
+ project_name, subset_ids=None, version_ids=None, fields=None
+):
+ return _get_versions(
+ project_name,
+ version_ids=version_ids,
+ subset_ids=subset_ids,
+ hero=True,
+ standard=False,
+ fields=fields
+ )
+
+
+def get_last_versions(project_name, subset_ids, active=None, fields=None):
+ if fields:
+ fields = set(fields)
+ fields.add("parent")
+
+ versions = _get_versions(
+ project_name,
+ subset_ids=subset_ids,
+ latest=True,
+ hero=False,
+ active=active,
+ fields=fields
+ )
+ return {
+ version["parent"]: version
+ for version in versions
+ }
+
+
+def get_last_version_by_subset_id(project_name, subset_id, fields=None):
+ versions = _get_versions(
+ project_name,
+ subset_ids=[subset_id],
+ latest=True,
+ hero=False,
+ fields=fields
+ )
+ if not versions:
+ return None
+ return versions[0]
+
+
+def get_last_version_by_subset_name(
+ project_name,
+ subset_name,
+ asset_id=None,
+ asset_name=None,
+ fields=None
+):
+ if not asset_id and not asset_name:
+ return None
+
+ if not asset_id:
+ asset = get_asset_by_name(
+ project_name, asset_name, fields=["_id"]
+ )
+ if not asset:
+ return None
+ asset_id = asset["_id"]
+
+ subset = get_subset_by_name(
+ project_name, subset_name, asset_id, fields=["_id"]
+ )
+ if not subset:
+ return None
+ return get_last_version_by_subset_id(
+ project_name, subset["_id"], fields=fields
+ )
+
+
+def get_output_link_versions(project_name, version_id, fields=None):
+ if not version_id:
+ return []
+
+ con = get_server_api_connection()
+ version_links = con.get_version_links(
+ project_name, version_id, link_direction="out")
+
+ version_ids = {
+ link["entityId"]
+ for link in version_links
+ if link["entityType"] == "version"
+ }
+ if not version_ids:
+ return []
+
+ return get_versions(project_name, version_ids=version_ids, fields=fields)
+
+
+def version_is_latest(project_name, version_id):
+ con = get_server_api_connection()
+ return con.version_is_latest(project_name, version_id)
+
+
+def get_representation_by_id(project_name, representation_id, fields=None):
+ representations = get_representations(
+ project_name,
+ representation_ids=[representation_id],
+ fields=fields
+ )
+ for representation in representations:
+ return representation
+ return None
+
+
+def get_representation_by_name(
+ project_name, representation_name, version_id, fields=None
+):
+ representations = get_representations(
+ project_name,
+ representation_names=[representation_name],
+ version_ids=[version_id],
+ fields=fields
+ )
+ for representation in representations:
+ return representation
+ return None
+
+
+def get_representations(
+ project_name,
+ representation_ids=None,
+ representation_names=None,
+ version_ids=None,
+ context_filters=None,
+ names_by_version_ids=None,
+ archived=False,
+ standard=True,
+ fields=None
+):
+ if context_filters is not None:
+ # TODO should we add the support?
+        # - there was the ability to filter using regex
+ raise ValueError("OP v4 can't filter by representation context.")
+
+ if not archived and not standard:
+ return
+
+ if archived and not standard:
+ active = False
+ elif not archived and standard:
+ active = True
+ else:
+ active = None
+
+ con = get_server_api_connection()
+ fields = representation_fields_v3_to_v4(fields, con)
+ if fields and active is not None:
+ fields.add("active")
+
+ representations = con.get_representations(
+ project_name,
+ representation_ids,
+ representation_names,
+ version_ids,
+ names_by_version_ids,
+ active,
+ fields=fields
+ )
+ for representation in representations:
+ yield convert_v4_representation_to_v3(representation)
+
+
+def get_representation_parents(project_name, representation):
+ if not representation:
+ return None
+
+ repre_id = representation["_id"]
+ parents_by_repre_id = get_representations_parents(
+ project_name, [representation]
+ )
+ return parents_by_repre_id[repre_id]
+
+
+def get_representations_parents(project_name, representations):
+ repre_ids = {
+ repre["_id"]
+ for repre in representations
+ }
+ con = get_server_api_connection()
+    parents_by_repre_id = con.get_representations_parents(
+        project_name, repre_ids
+    )
+ folder_ids = set()
+    for parents in parents_by_repre_id.values():
+ folder_ids.add(parents[2]["id"])
+
+ tasks_by_folder_id = {}
+
+ new_parents = {}
+    for repre_id, parents in parents_by_repre_id.items():
+ version, subset, folder, project = parents
+ folder_tasks = tasks_by_folder_id.get(folder["id"]) or {}
+ folder["tasks"] = folder_tasks
+ new_parents[repre_id] = (
+ convert_v4_version_to_v3(version),
+ convert_v4_subset_to_v3(subset),
+ convert_v4_folder_to_v3(folder, project_name),
+ project
+ )
+ return new_parents
+
+
+def get_archived_representations(
+ project_name,
+ representation_ids=None,
+ representation_names=None,
+ version_ids=None,
+ context_filters=None,
+ names_by_version_ids=None,
+ fields=None
+):
+ return get_representations(
+ project_name,
+ representation_ids=representation_ids,
+ representation_names=representation_names,
+ version_ids=version_ids,
+ context_filters=context_filters,
+ names_by_version_ids=names_by_version_ids,
+ archived=True,
+ standard=False,
+ fields=fields
+ )
+
+
+def get_thumbnail(
+ project_name, thumbnail_id, entity_type, entity_id, fields=None
+):
+ """Receive thumbnail entity data.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ thumbnail_id (Union[str, ObjectId]): Id of thumbnail entity.
+ entity_type (str): Type of entity for which the thumbnail should be
+ received.
+ entity_id (str): Id of entity for which the thumbnail should be
+ received.
+ fields (Iterable[str]): Fields that should be returned. All fields are
+ returned if 'None' is passed.
+
+ Returns:
+ None: If thumbnail with specified id was not found.
+ Dict: Thumbnail entity data which can be reduced to specified 'fields'.
+ """
+
+ if not thumbnail_id or not entity_type or not entity_id:
+ return None
+
+ if entity_type == "asset":
+ entity_type = "folder"
+
+ elif entity_type == "hero_version":
+ entity_type = "version"
+
+ return {
+ "_id": thumbnail_id,
+ "type": "thumbnail",
+ "schema": CURRENT_THUMBNAIL_SCHEMA,
+ "data": {
+ "entity_type": entity_type,
+ "entity_id": entity_id
+ }
+ }
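+
+# Usage sketch with hypothetical ids; the returned document only mimics the
+# v3 thumbnail structure, no server query happens here:
+#
+#   >>> get_thumbnail("demo_project", "thumb-id", "asset", "folder-id")["data"]
+#   {'entity_type': 'folder', 'entity_id': 'folder-id'}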
+
+
+def get_thumbnails(project_name, thumbnail_contexts, fields=None):
+ """Get thumbnail entities.
+
+ Warning:
+        This function is not OpenPype compatible. It is not used anywhere in
+        the codebase, so there is nothing to convert. The previous
+        implementation cannot be AYON compatible without entity types.
+ """
+
+    # Use a list as dictionaries are not hashable and cannot be stored in set
+    thumbnail_items = []
+    for thumbnail_context in thumbnail_contexts:
+        thumbnail_id, entity_type, entity_id = thumbnail_context
+        thumbnail_item = get_thumbnail(
+            project_name, thumbnail_id, entity_type, entity_id
+        )
+        if thumbnail_item:
+            thumbnail_items.append(thumbnail_item)
+    return thumbnail_items
+
+
+def get_thumbnail_id_from_source(project_name, src_type, src_id):
+ """Receive thumbnail id from source entity.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ src_type (str): Type of source entity ('asset', 'version').
+ src_id (Union[str, ObjectId]): Id of source entity.
+
+ Returns:
+ ObjectId: Thumbnail id assigned to entity.
+        None: If the source entity does not have any thumbnail id assigned.
+ """
+
+ if not src_type or not src_id:
+ return None
+
+ if src_type == "version":
+ version = get_version_by_id(
+ project_name, src_id, fields=["data.thumbnail_id"]
+ ) or {}
+ return version.get("data", {}).get("thumbnail_id")
+
+ if src_type == "asset":
+ asset = get_asset_by_id(
+ project_name, src_id, fields=["data.thumbnail_id"]
+ ) or {}
+ return asset.get("data", {}).get("thumbnail_id")
+
+ return None
+
+
+def get_workfile_info(
+ project_name, asset_id, task_name, filename, fields=None
+):
+ if not asset_id or not task_name or not filename:
+ return None
+
+ con = get_server_api_connection()
+ task = con.get_task_by_name(
+ project_name, asset_id, task_name, fields=["id", "name", "folderId"]
+ )
+ if not task:
+ return None
+
+ fields = workfile_info_fields_v3_to_v4(fields)
+
+ for workfile_info in con.get_workfiles_info(
+ project_name, task_ids=[task["id"]], fields=fields
+ ):
+ if workfile_info["name"] == filename:
+ return convert_v4_workfile_info_to_v3(workfile_info, task)
+ return None
diff --git a/openpype/client/server/entity_links.py b/openpype/client/server/entity_links.py
new file mode 100644
index 0000000000..d8395aabe7
--- /dev/null
+++ b/openpype/client/server/entity_links.py
@@ -0,0 +1,156 @@
+import ayon_api
+from ayon_api import get_folder_links, get_versions_links
+
+from .entities import get_assets, get_representation_by_id
+
+
+def get_linked_asset_ids(project_name, asset_doc=None, asset_id=None):
+ """Extract linked asset ids from asset document.
+
+ One of asset document or asset id must be passed.
+
+ Note:
+        Asset links now work only from asset to asset.
+
+ Args:
+ project_name (str): Project where to look for asset.
+ asset_doc (dict): Asset document from DB.
+ asset_id (str): Asset id to find its document.
+
+ Returns:
+ List[Union[ObjectId, str]]: Asset ids of input links.
+ """
+
+ output = []
+ if not asset_doc and not asset_id:
+ return output
+
+ if not asset_id:
+ asset_id = asset_doc["_id"]
+
+ links = get_folder_links(project_name, asset_id, link_direction="in")
+ return [
+ link["entityId"]
+ for link in links
+ if link["entityType"] == "folder"
+ ]
+
+
+def get_linked_assets(
+ project_name, asset_doc=None, asset_id=None, fields=None
+):
+ """Return linked assets based on passed asset document.
+
+ One of asset document or asset id must be passed.
+
+ Args:
+ project_name (str): Name of project where to look for queried entities.
+ asset_doc (Dict[str, Any]): Asset document from database.
+ asset_id (Union[ObjectId, str]): Asset id. Can be used instead of
+ asset document.
+ fields (Iterable[str]): Fields that should be returned. All fields are
+ returned if 'None' is passed.
+
+ Returns:
+ List[Dict[str, Any]]: Asset documents of input links for passed
+ asset doc.
+ """
+
+ link_ids = get_linked_asset_ids(project_name, asset_doc, asset_id)
+ if not link_ids:
+ return []
+ return list(get_assets(project_name, asset_ids=link_ids, fields=fields))
+
+
+def get_linked_representation_id(
+ project_name, repre_doc=None, repre_id=None, link_type=None, max_depth=None
+):
+ """Returns list of linked ids of particular type (if provided).
+
+    One of representation document or representation id must be passed.
+
+    Note:
+        Representation links now work only from representation through
+        version back to representations.
+
+ Todos:
+        Depth queries are missing. It is unclear how the previous
+        implementation found more representations in depth, probably via
+        links to versions?
+
+ Args:
+ project_name (str): Name of project where look for links.
+ repre_doc (Dict[str, Any]): Representation document.
+ repre_id (Union[ObjectId, str]): Representation id.
+ link_type (str): Type of link (e.g. 'reference', ...).
+ max_depth (int): Limit recursion level. Default: 0
+
+ Returns:
+        List[ObjectId]: Linked representation ids.
+ """
+
+ if repre_doc:
+ repre_id = repre_doc["_id"]
+
+ if not repre_id and not repre_doc:
+ return []
+
+ version_id = None
+ if repre_doc:
+ version_id = repre_doc.get("parent")
+
+ if not version_id:
+ repre_doc = get_representation_by_id(
+ project_name, repre_id, fields=["parent"]
+ )
+ if repre_doc:
+ version_id = repre_doc["parent"]
+
+ if not version_id:
+ return []
+
+ if max_depth is None or max_depth == 0:
+ max_depth = 1
+
+ link_types = None
+ if link_type:
+ link_types = [link_type]
+
+    # Store already found version ids to avoid recursion; the same set also
+    # serves as the output, so 'version_id' must be removed at the end
+ linked_version_ids = {version_id}
+ # Each loop of depth will reset this variable
+ versions_to_check = {version_id}
+ for _ in range(max_depth):
+ if not versions_to_check:
+ break
+
+ links = get_versions_links(
+ project_name,
+ versions_to_check,
+ link_types=link_types,
+ link_direction="out")
+
+ versions_to_check = set()
+ for link in links:
+ # Care only about version links
+ if link["entityType"] != "version":
+ continue
+ entity_id = link["entityId"]
+ # Skip already found linked version ids
+ if entity_id in linked_version_ids:
+ continue
+ linked_version_ids.add(entity_id)
+ versions_to_check.add(entity_id)
+
+ linked_version_ids.remove(version_id)
+ if not linked_version_ids:
+ return []
+
+ representations = ayon_api.get_representations(
+ project_name,
+ version_ids=linked_version_ids,
+ fields=["id"])
+ return [
+ repre["id"]
+ for repre in representations
+ ]
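+
+# Usage sketch with hypothetical ids: collect ids of representations whose
+# versions are linked (one level deep) to the version of the given
+# representation.
+#
+#   >>> get_linked_representation_id(
+#   ...     "demo_project", repre_id="repre-id", link_type="reference"
+#   ... )
+#   ['linked-repre-id']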
diff --git a/openpype/client/server/openpype_comp.py b/openpype/client/server/openpype_comp.py
new file mode 100644
index 0000000000..a123fe3167
--- /dev/null
+++ b/openpype/client/server/openpype_comp.py
@@ -0,0 +1,156 @@
+import collections
+from ayon_api.graphql import GraphQlQuery, FIELD_VALUE, fields_to_dict
+
+from .constants import DEFAULT_FOLDER_FIELDS
+
+
+def folders_tasks_graphql_query(fields):
+ query = GraphQlQuery("FoldersQuery")
+ project_name_var = query.add_variable("projectName", "String!")
+ folder_ids_var = query.add_variable("folderIds", "[String!]")
+ parent_folder_ids_var = query.add_variable("parentFolderIds", "[String!]")
+ folder_paths_var = query.add_variable("folderPaths", "[String!]")
+ folder_names_var = query.add_variable("folderNames", "[String!]")
+ has_products_var = query.add_variable("folderHasProducts", "Boolean!")
+
+ project_field = query.add_field("project")
+ project_field.set_filter("name", project_name_var)
+
+ folders_field = project_field.add_field_with_edges("folders")
+ folders_field.set_filter("ids", folder_ids_var)
+ folders_field.set_filter("parentIds", parent_folder_ids_var)
+ folders_field.set_filter("names", folder_names_var)
+ folders_field.set_filter("paths", folder_paths_var)
+ folders_field.set_filter("hasProducts", has_products_var)
+
+ fields = set(fields)
+ fields.discard("tasks")
+ tasks_field = folders_field.add_field_with_edges("tasks")
+ tasks_field.add_field("name")
+ tasks_field.add_field("taskType")
+
+ nested_fields = fields_to_dict(fields)
+
+ query_queue = collections.deque()
+ for key, value in nested_fields.items():
+ query_queue.append((key, value, folders_field))
+
+ while query_queue:
+ item = query_queue.popleft()
+ key, value, parent = item
+ field = parent.add_field(key)
+ if value is FIELD_VALUE:
+ continue
+
+ for k, v in value.items():
+ query_queue.append((k, v, field))
+ return query
+
+
+def get_folders_with_tasks(
+ con,
+ project_name,
+ folder_ids=None,
+ folder_paths=None,
+ folder_names=None,
+ parent_ids=None,
+ active=True,
+ fields=None
+):
+ """Query folders with tasks from server.
+
+    This exists for v3 compatibility, where tasks were stored on assets.
+    Querying folders and tasks this way is inefficient, so it was added
+    only as a compatibility function.
+
+ Todos:
+        Folder name is not a unique identifier, so folder path filtering
+        should be added.
+
+ Notes:
+        The 'active' filter does not have a direct equivalent in GraphQl.
+
+ Args:
+ con (ServerAPI): Connection to server.
+ project_name (str): Name of project where folders are.
+ folder_ids (Iterable[str]): Folder ids to filter.
+ folder_paths (Iterable[str]): Folder paths used for filtering.
+ folder_names (Iterable[str]): Folder names used for filtering.
+ parent_ids (Iterable[str]): Ids of folder parents. Use 'None'
+ if folder is direct child of project.
+ active (Union[bool, None]): Filter active/inactive folders. Both
+ are returned if is set to None.
+ fields (Union[Iterable(str), None]): Fields to be queried
+ for folder. All possible folder fields are returned if 'None'
+ is passed.
+
+ Returns:
+ List[Dict[str, Any]]: Queried folder entities.
+ """
+
+ if not project_name:
+ return []
+
+ filters = {
+ "projectName": project_name
+ }
+ if folder_ids is not None:
+ folder_ids = set(folder_ids)
+ if not folder_ids:
+ return []
+ filters["folderIds"] = list(folder_ids)
+
+ if folder_paths is not None:
+ folder_paths = set(folder_paths)
+ if not folder_paths:
+ return []
+ filters["folderPaths"] = list(folder_paths)
+
+ if folder_names is not None:
+ folder_names = set(folder_names)
+ if not folder_names:
+ return []
+ filters["folderNames"] = list(folder_names)
+
+ if parent_ids is not None:
+ parent_ids = set(parent_ids)
+ if not parent_ids:
+ return []
+ if None in parent_ids:
+ # Replace 'None' with '"root"' which is used during GraphQl
+ # query for parent ids filter for folders without folder
+ # parent
+ parent_ids.remove(None)
+ parent_ids.add("root")
+
+ if project_name in parent_ids:
+ # Replace project name with '"root"' which is used during
+ # GraphQl query for parent ids filter for folders without
+ # folder parent
+ parent_ids.remove(project_name)
+ parent_ids.add("root")
+
+ filters["parentFolderIds"] = list(parent_ids)
+
+ if fields:
+ fields = set(fields)
+ else:
+ fields = con.get_default_fields_for_type("folder")
+ fields |= DEFAULT_FOLDER_FIELDS
+
+ if active is not None:
+ fields.add("active")
+
+ query = folders_tasks_graphql_query(fields)
+ for attr, filter_value in filters.items():
+ query.set_variable_value(attr, filter_value)
+
+ parsed_data = query.query(con)
+ folders = parsed_data["project"]["folders"]
+ if active is None:
+ return folders
+ return [
+ folder
+ for folder in folders
+ if folder["active"] is active
+ ]
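+
+# Usage sketch with hypothetical values ('con' being a server api
+# connection): query folders including their tasks, which plain
+# 'con.get_folders' does not return.
+#
+#   >>> folders = get_folders_with_tasks(
+#   ...     con, "demo_project", folder_paths=["/shots/sh010"]
+#   ... )
+#   >>> folders[0]["tasks"]
+#   [{'name': 'compositing', 'taskType': 'Compositing'}]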
diff --git a/openpype/client/server/operations.py b/openpype/client/server/operations.py
new file mode 100644
index 0000000000..5b38405c34
--- /dev/null
+++ b/openpype/client/server/operations.py
@@ -0,0 +1,881 @@
+import copy
+import json
+import collections
+import uuid
+import datetime
+
+from bson.objectid import ObjectId
+from ayon_api import get_server_api_connection
+
+from openpype.client.operations_base import (
+ REMOVED_VALUE,
+ CreateOperation,
+ UpdateOperation,
+ DeleteOperation,
+ BaseOperationsSession
+)
+
+from openpype.client.mongo.operations import (
+ CURRENT_THUMBNAIL_SCHEMA,
+ CURRENT_REPRESENTATION_SCHEMA,
+ CURRENT_HERO_VERSION_SCHEMA,
+ CURRENT_VERSION_SCHEMA,
+ CURRENT_SUBSET_SCHEMA,
+ CURRENT_ASSET_DOC_SCHEMA,
+ CURRENT_PROJECT_SCHEMA,
+)
+
+from .conversion_utils import (
+ convert_create_asset_to_v4,
+ convert_create_task_to_v4,
+ convert_create_subset_to_v4,
+ convert_create_version_to_v4,
+ convert_create_hero_version_to_v4,
+ convert_create_representation_to_v4,
+ convert_create_workfile_info_to_v4,
+
+ convert_update_folder_to_v4,
+ convert_update_subset_to_v4,
+ convert_update_version_to_v4,
+ convert_update_hero_version_to_v4,
+ convert_update_representation_to_v4,
+ convert_update_workfile_info_to_v4,
+)
+from .utils import create_entity_id
+
+
+def _create_or_convert_to_id(entity_id=None):
+ if entity_id is None:
+ return create_entity_id()
+
+ if isinstance(entity_id, ObjectId):
+ raise TypeError("Type of 'ObjectId' is not supported anymore.")
+
+ # Validate if can be converted to uuid
+ uuid.UUID(entity_id)
+ return entity_id
+
+
+def new_project_document(
+ project_name, project_code, config, data=None, entity_id=None
+):
+ """Create skeleton data of project document.
+
+ Args:
+ project_name (str): Name of project. Used as identifier of a project.
+ project_code (str): Shorter version of projet without spaces and
+ special characters (in most of cases). Should be also considered
+ as unique name across projects.
+ config (Dic[str, Any]): Project config consist of roots, templates,
+ applications and other project Anatomy related data.
+ data (Dict[str, Any]): Project data with information about it's
+ attributes (e.g. 'fps' etc.) or integration specific keys.
+ entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+ created if not passed.
+
+ Returns:
+ Dict[str, Any]: Skeleton of project document.
+ """
+
+ if data is None:
+ data = {}
+
+ data["code"] = project_code
+
+ return {
+ "_id": _create_or_convert_to_id(entity_id),
+ "name": project_name,
+ "type": CURRENT_PROJECT_SCHEMA,
+ "entity_data": data,
+ "config": config
+ }
+
+
+def new_asset_document(
+ name, project_id, parent_id, parents, data=None, entity_id=None
+):
+ """Create skeleton data of asset document.
+
+ Args:
+ name (str): Is considered as unique identifier of asset in project.
+        project_id (Union[str, ObjectId]): Id of project document.
+ parent_id (Union[str, ObjectId]): Id of parent asset.
+ parents (List[str]): List of parent assets names.
+ data (Dict[str, Any]): Asset document data. Empty dictionary is used
+ if not passed. Value of 'parent_id' is used to fill 'visualParent'.
+ entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+ created if not passed.
+
+ Returns:
+ Dict[str, Any]: Skeleton of asset document.
+ """
+
+ if data is None:
+ data = {}
+ if parent_id is not None:
+ parent_id = _create_or_convert_to_id(parent_id)
+ data["visualParent"] = parent_id
+ data["parents"] = parents
+
+ return {
+ "_id": _create_or_convert_to_id(entity_id),
+ "type": "asset",
+ "name": name,
+ # This will be ignored
+ "parent": project_id,
+ "data": data,
+ "schema": CURRENT_ASSET_DOC_SCHEMA
+ }
+
+
+def new_subset_document(name, family, asset_id, data=None, entity_id=None):
+ """Create skeleton data of subset document.
+
+ Args:
+ name (str): Is considered as unique identifier of subset under asset.
+ family (str): Subset's family.
+ asset_id (Union[str, ObjectId]): Id of parent asset.
+ data (Dict[str, Any]): Subset document data. Empty dictionary is used
+ if not passed. Value of 'family' is used to fill 'family'.
+ entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+ created if not passed.
+
+ Returns:
+ Dict[str, Any]: Skeleton of subset document.
+ """
+
+ if data is None:
+ data = {}
+ data["family"] = family
+ return {
+ "_id": _create_or_convert_to_id(entity_id),
+ "schema": CURRENT_SUBSET_SCHEMA,
+ "type": "subset",
+ "name": name,
+ "data": data,
+ "parent": _create_or_convert_to_id(asset_id)
+ }
+
+
+def new_version_doc(version, subset_id, data=None, entity_id=None):
+ """Create skeleton data of version document.
+
+ Args:
+ version (int): Is considered as unique identifier of version
+ under subset.
+ subset_id (Union[str, ObjectId]): Id of parent subset.
+ data (Dict[str, Any]): Version document data.
+ entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+ created if not passed.
+
+ Returns:
+ Dict[str, Any]: Skeleton of version document.
+ """
+
+ if data is None:
+ data = {}
+
+ return {
+ "_id": _create_or_convert_to_id(entity_id),
+ "schema": CURRENT_VERSION_SCHEMA,
+ "type": "version",
+ "name": int(version),
+ "parent": _create_or_convert_to_id(subset_id),
+ "data": data
+ }
+
+
+def new_hero_version_doc(subset_id, data, version=None, entity_id=None):
+ """Create skeleton data of hero version document.
+
+ Args:
+ subset_id (Union[str, ObjectId]): Id of parent subset.
+ data (Dict[str, Any]): Version document data.
+ version (int): Version of source version.
+ entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+ created if not passed.
+
+ Returns:
+ Dict[str, Any]: Skeleton of version document.
+ """
+
+ if version is None:
+ version = -1
+ elif version > 0:
+ version = -version
+
+ return {
+ "_id": _create_or_convert_to_id(entity_id),
+ "schema": CURRENT_HERO_VERSION_SCHEMA,
+ "type": "hero_version",
+ "version": version,
+ "parent": _create_or_convert_to_id(subset_id),
+ "data": data
+ }
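+
+# Sketch of the negation above (hypothetical id): a hero version created
+# from version 4 is stored with a negative version number.
+#
+#   >>> new_hero_version_doc("subset-id", {}, version=4)["version"]
+#   -4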
+
+
+def new_representation_doc(
+ name, version_id, context, data=None, entity_id=None
+):
+ """Create skeleton data of representation document.
+
+ Args:
+ name (str): Representation name considered as unique identifier
+ of representation under version.
+ version_id (Union[str, ObjectId]): Id of parent version.
+        context (Dict[str, Any]): Representation context used to fill
+            templates or to query.
+ data (Dict[str, Any]): Representation document data.
+ entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+ created if not passed.
+
+ Returns:
+ Dict[str, Any]: Skeleton of version document.
+ """
+
+ if data is None:
+ data = {}
+
+ return {
+ "_id": _create_or_convert_to_id(entity_id),
+ "schema": CURRENT_REPRESENTATION_SCHEMA,
+ "type": "representation",
+ "parent": _create_or_convert_to_id(version_id),
+ "name": name,
+ "data": data,
+
+ # Imprint shortcut to context for performance reasons.
+ "context": context
+ }
+
+
+def new_thumbnail_doc(data=None, entity_id=None):
+ """Create skeleton data of thumbnail document.
+
+ Args:
+ data (Dict[str, Any]): Thumbnail document data.
+ entity_id (Union[str, ObjectId]): Predefined id of document. New id is
+ created if not passed.
+
+ Returns:
+ Dict[str, Any]: Skeleton of thumbnail document.
+ """
+
+ if data is None:
+ data = {}
+
+ return {
+ "_id": _create_or_convert_to_id(entity_id),
+ "type": "thumbnail",
+ "schema": CURRENT_THUMBNAIL_SCHEMA,
+ "data": data
+ }
+
+
+def new_workfile_info_doc(
+ filename, asset_id, task_name, files, data=None, entity_id=None
+):
+ """Create skeleton data of workfile info document.
+
+ Workfile document is at this moment used primarily for artist notes.
+
+ Args:
+ filename (str): Filename of workfile.
+        asset_id (Union[str, ObjectId]): Id of asset under which workfile
+            lives.
+ task_name (str): Task under which was workfile created.
+ files (List[str]): List of rootless filepaths related to workfile.
+ data (Dict[str, Any]): Additional metadata.
+
+ Returns:
+ Dict[str, Any]: Skeleton of workfile info document.
+ """
+
+ if not data:
+ data = {}
+
+ return {
+ "_id": _create_or_convert_to_id(entity_id),
+ "type": "workfile",
+ "parent": _create_or_convert_to_id(asset_id),
+ "task_name": task_name,
+ "filename": filename,
+ "data": data,
+ "files": files
+ }
+
+
+def _prepare_update_data(old_doc, new_doc, replace):
+ changes = {}
+ for key, value in new_doc.items():
+ if key not in old_doc or value != old_doc[key]:
+ changes[key] = value
+
+ if replace:
+ for key in old_doc.keys():
+ if key not in new_doc:
+ changes[key] = REMOVED_VALUE
+ return changes
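+
+# Sketch of the diffing above with hypothetical values: changed keys are
+# kept and, with 'replace', keys missing from the new document are marked
+# with 'REMOVED_VALUE'.
+#
+#   >>> _prepare_update_data({"name": "a", "obsolete": 1}, {"name": "b"}, True)
+#   {'name': 'b', 'obsolete': REMOVED_VALUE}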
+
+
+def prepare_subset_update_data(old_doc, new_doc, replace=True):
+ """Compare two subset documents and prepare update data.
+
+    Based on the compared values, update data for 'UpdateOperation'
+    is created.
+
+ Empty output means that documents are identical.
+
+ Returns:
+ Dict[str, Any]: Changes between old and new document.
+ """
+
+ return _prepare_update_data(old_doc, new_doc, replace)
+
+
+def prepare_version_update_data(old_doc, new_doc, replace=True):
+ """Compare two version documents and prepare update data.
+
+    Based on the compared values, update data for 'UpdateOperation'
+    is created.
+
+ Empty output means that documents are identical.
+
+ Returns:
+ Dict[str, Any]: Changes between old and new document.
+ """
+
+ return _prepare_update_data(old_doc, new_doc, replace)
+
+
+def prepare_hero_version_update_data(old_doc, new_doc, replace=True):
+ """Compare two hero version documents and prepare update data.
+
+    Based on the compared values, update data for 'UpdateOperation'
+    is created.
+
+ Empty output means that documents are identical.
+
+ Returns:
+ Dict[str, Any]: Changes between old and new document.
+ """
+
+ changes = _prepare_update_data(old_doc, new_doc, replace)
+ changes.pop("version_id", None)
+ return changes
+
+
+def prepare_representation_update_data(old_doc, new_doc, replace=True):
+ """Compare two representation documents and prepare update data.
+
+    Based on the compared values, update data for 'UpdateOperation'
+    is created.
+
+ Empty output means that documents are identical.
+
+ Returns:
+ Dict[str, Any]: Changes between old and new document.
+ """
+
+ changes = _prepare_update_data(old_doc, new_doc, replace)
+ context = changes.get("data", {}).get("context")
+ # Make sure that both 'family' and 'subset' are in changes if
+ # one of them changed (they'll both become 'product').
+ if (
+ context
+ and ("family" in context or "subset" in context)
+ ):
+ context["family"] = new_doc["data"]["context"]["family"]
+ context["subset"] = new_doc["data"]["context"]["subset"]
+
+ return changes
+
+
+def prepare_workfile_info_update_data(old_doc, new_doc, replace=True):
+ """Compare two workfile info documents and prepare update data.
+
+    Based on the compared values, update data for 'UpdateOperation'
+    is created.
+
+ Empty output means that documents are identical.
+
+ Returns:
+ Dict[str, Any]: Changes between old and new document.
+ """
+
+ return _prepare_update_data(old_doc, new_doc, replace)
+
+
+class FailedOperations(Exception):
+ pass
+
+
+def entity_data_json_default(value):
+ if isinstance(value, datetime.datetime):
+ return int(value.timestamp())
+
+ raise TypeError(
+ "Object of type {} is not JSON serializable".format(str(type(value)))
+ )
+
+
+def failed_json_default(value):
+ return "< Failed value {} > {}".format(type(value), str(value))
+
+
+class ServerCreateOperation(CreateOperation):
+ """Operation to create an entity.
+
+ Args:
+ project_name (str): On which project operation will happen.
+ entity_type (str): Type of entity on which change happens.
+ e.g. 'asset', 'representation' etc.
+ data (Dict[str, Any]): Data of entity that will be created.
+ """
+
+ def __init__(self, project_name, entity_type, data, session):
+ self._session = session
+
+ if not data:
+ data = {}
+ data = copy.deepcopy(data)
+ if entity_type == "project":
+ raise ValueError("Project cannot be created using operations")
+
+ tasks = None
+ if entity_type in "asset":
+ # TODO handle tasks
+ entity_type = "folder"
+ if "data" in data:
+ tasks = data["data"].get("tasks")
+
+ project = self._session.get_project(project_name)
+ new_data = convert_create_asset_to_v4(data, project, self.con)
+
+ elif entity_type == "task":
+ project = self._session.get_project(project_name)
+ new_data = convert_create_task_to_v4(data, project, self.con)
+
+ elif entity_type == "subset":
+ new_data = convert_create_subset_to_v4(data, self.con)
+ entity_type = "product"
+
+ elif entity_type == "version":
+ new_data = convert_create_version_to_v4(data, self.con)
+
+ elif entity_type == "hero_version":
+ new_data = convert_create_hero_version_to_v4(
+ data, project_name, self.con
+ )
+ entity_type = "version"
+
+ elif entity_type in ("representation", "archived_representation"):
+ new_data = convert_create_representation_to_v4(data, self.con)
+ entity_type = "representation"
+
+ elif entity_type == "workfile":
+ new_data = convert_create_workfile_info_to_v4(
+ data, project_name, self.con
+ )
+
+ else:
+ raise ValueError(
+ "Unhandled entity type \"{}\"".format(entity_type)
+ )
+
+ # Simple check if data can be dumped into json
+ # - should raise error on 'ObjectId' object
+ try:
+ new_data = json.loads(
+ json.dumps(new_data, default=entity_data_json_default)
+ )
+
+        except Exception:
+ raise ValueError("Couldn't json parse body: {}".format(
+ json.dumps(new_data, default=failed_json_default)
+ ))
+
+ super(ServerCreateOperation, self).__init__(
+ project_name, entity_type, new_data
+ )
+
+ if "id" not in self._data:
+ self._data["id"] = create_entity_id()
+
+ if tasks:
+ copied_tasks = copy.deepcopy(tasks)
+ for task_name, task in copied_tasks.items():
+ task["name"] = task_name
+ task["folderId"] = self._data["id"]
+ self.session.create_entity(
+ project_name, "task", task, nested_id=self.id
+ )
+
+ @property
+ def con(self):
+ return self.session.con
+
+ @property
+ def session(self):
+ return self._session
+
+ @property
+ def entity_id(self):
+ return self._data["id"]
+
+ def to_server_operation(self):
+ return {
+ "id": self.id,
+ "type": "create",
+ "entityType": self.entity_type,
+ "entityId": self.entity_id,
+ "data": self._data
+ }
+
+
+class ServerUpdateOperation(UpdateOperation):
+ """Operation to update an entity.
+
+ Args:
+ project_name (str): On which project operation will happen.
+ entity_type (str): Type of entity on which change happens.
+ e.g. 'asset', 'representation' etc.
+ entity_id (Union[str, ObjectId]): Identifier of an entity.
+ update_data (Dict[str, Any]): Key -> value changes that will be set in
+ database. If value is set to 'REMOVED_VALUE' the key will be
+ removed. Only first level of dictionary is checked (on purpose).
+ """
+
+ def __init__(
+ self, project_name, entity_type, entity_id, update_data, session
+ ):
+ self._session = session
+
+ update_data = copy.deepcopy(update_data)
+ if entity_type == "project":
+ raise ValueError("Project cannot be created using operations")
+
+ if entity_type in ("asset", "archived_asset"):
+ new_update_data = convert_update_folder_to_v4(
+ project_name, entity_id, update_data, self.con
+ )
+ entity_type = "folder"
+
+ elif entity_type == "subset":
+ new_update_data = convert_update_subset_to_v4(
+ project_name, entity_id, update_data, self.con
+ )
+ entity_type = "product"
+
+ elif entity_type == "version":
+ new_update_data = convert_update_version_to_v4(
+ project_name, entity_id, update_data, self.con
+ )
+
+ elif entity_type == "hero_version":
+ new_update_data = convert_update_hero_version_to_v4(
+ project_name, entity_id, update_data, self.con
+ )
+ entity_type = "version"
+
+ elif entity_type in ("representation", "archived_representation"):
+ new_update_data = convert_update_representation_to_v4(
+ project_name, entity_id, update_data, self.con
+ )
+ entity_type = "representation"
+
+ elif entity_type == "workfile":
+ new_update_data = convert_update_workfile_info_to_v4(
+ project_name, entity_id, update_data, self.con
+ )
+
+ else:
+ raise ValueError(
+ "Unhandled entity type \"{}\"".format(entity_type)
+ )
+
+ try:
+ new_update_data = json.loads(
+ json.dumps(new_update_data, default=entity_data_json_default)
+ )
+
+        except Exception:
+ raise ValueError("Couldn't json parse body: {}".format(
+ json.dumps(new_update_data, default=failed_json_default)
+ ))
+
+ super(ServerUpdateOperation, self).__init__(
+ project_name, entity_type, entity_id, new_update_data
+ )
+
+ @property
+ def con(self):
+ return self.session.con
+
+ @property
+ def session(self):
+ return self._session
+
+ def to_server_operation(self):
+ if not self._update_data:
+ return None
+
+ update_data = {}
+ for key, value in self._update_data.items():
+ if value is REMOVED_VALUE:
+ value = None
+ update_data[key] = value
+
+ return {
+ "id": self.id,
+ "type": "update",
+ "entityType": self.entity_type,
+ "entityId": self.entity_id,
+ "data": update_data
+ }
+
+
+class ServerDeleteOperation(DeleteOperation):
+ """Operation to delete an entity.
+
+ Args:
+ project_name (str): On which project operation will happen.
+ entity_type (str): Type of entity on which change happens.
+ e.g. 'asset', 'representation' etc.
+ entity_id (Union[str, ObjectId]): Entity id that will be removed.
+ """
+
+ def __init__(self, project_name, entity_type, entity_id, session):
+ self._session = session
+
+ if entity_type == "asset":
+ entity_type = "folder"
+
+ elif entity_type == "hero_version":
+ entity_type = "version"
+
+ elif entity_type == "subset":
+ entity_type = "product"
+
+ super(ServerDeleteOperation, self).__init__(
+ project_name, entity_type, entity_id
+ )
+
+ @property
+ def con(self):
+ return self.session.con
+
+ @property
+ def session(self):
+ return self._session
+
+ def to_server_operation(self):
+ return {
+ "id": self.id,
+ "type": self.operation_name,
+ "entityId": self.entity_id,
+ "entityType": self.entity_type,
+ }
+
+
+class OperationsSession(BaseOperationsSession):
+ def __init__(self, con=None, *args, **kwargs):
+ super(OperationsSession, self).__init__(*args, **kwargs)
+ if con is None:
+ con = get_server_api_connection()
+ self._con = con
+ self._project_cache = {}
+ self._nested_operations = collections.defaultdict(list)
+
+ @property
+ def con(self):
+ return self._con
+
+ def get_project(self, project_name):
+ if project_name not in self._project_cache:
+ self._project_cache[project_name] = self.con.get_project(
+ project_name)
+ return copy.deepcopy(self._project_cache[project_name])
+
+ def commit(self):
+ """Commit session operations."""
+
+ operations, self._operations = self._operations, []
+ if not operations:
+ return
+
+ operations_by_project = collections.defaultdict(list)
+ for operation in operations:
+ operations_by_project[operation.project_name].append(operation)
+
+ body_by_id = {}
+ results = []
+ for project_name, operations in operations_by_project.items():
+ operations_body = []
+ for operation in operations:
+ body = operation.to_server_operation()
+ if body is not None:
+ try:
+ json.dumps(body)
+ except Exception:
+ raise ValueError("Couldn't JSON serialize body: {}".format(
+ json.dumps(
+ body, indent=4, default=failed_json_default
+ )
+ ))
+
+ body_by_id[operation.id] = body
+ operations_body.append(body)
+
+ if operations_body:
+ result = self._con.post(
+ "projects/{}/operations".format(project_name),
+ operations=operations_body,
+ canFail=False
+ )
+ results.append(result.data)
+
+ for result in results:
+ if result.get("success"):
+ continue
+
+ if "operations" not in result:
+ raise FailedOperations(
+ "Operation failed. Content: {}".format(str(result))
+ )
+
+ for op_result in result["operations"]:
+ if not op_result["success"]:
+ operation_id = op_result["id"]
+ raise FailedOperations((
+ "Operation \"{}\" failed with data:\n{}\nError: {}."
+ ).format(
+ operation_id,
+ json.dumps(body_by_id[operation_id], indent=4),
+ op_result.get("error", "unknown"),
+ ))
+
+ def create_entity(self, project_name, entity_type, data, nested_id=None):
+ """Fast access to 'ServerCreateOperation'.
+
+ Args:
+ project_name (str): Project in which the creation happens.
+ entity_type (str): Which entity type will be created.
+ data (Dict[str, Any]): Entity data.
+ nested_id (str): Id of the operation that triggered this one.
+ Operations can trigger suboperations, but they must be added
+ to the operations list after their parent is added.
+
+ Returns:
+ ServerCreateOperation: Object of create operation.
+ """
+
+ operation = ServerCreateOperation(
+ project_name, entity_type, data, self
+ )
+
+ if nested_id:
+ self._nested_operations[nested_id].append(operation)
+ else:
+ self.add(operation)
+ if operation.id in self._nested_operations:
+ self.extend(self._nested_operations.pop(operation.id))
+
+ return operation
+
+ def update_entity(
+ self, project_name, entity_type, entity_id, update_data, nested_id=None
+ ):
+ """Fast access to 'ServerUpdateOperation'.
+
+ Returns:
+ ServerUpdateOperation: Object of update operation.
+ """
+
+ operation = ServerUpdateOperation(
+ project_name, entity_type, entity_id, update_data, self
+ )
+ if nested_id:
+ self._nested_operations[nested_id].append(operation)
+ else:
+ self.add(operation)
+ if operation.id in self._nested_operations:
+ self.extend(self._nested_operations.pop(operation.id))
+ return operation
+
+ def delete_entity(
+ self, project_name, entity_type, entity_id, nested_id=None
+ ):
+ """Fast access to 'ServerDeleteOperation'.
+
+ Returns:
+ ServerDeleteOperation: Object of delete operation.
+ """
+
+ operation = ServerDeleteOperation(
+ project_name, entity_type, entity_id, self
+ )
+ if nested_id:
+ self._nested_operations[nested_id].append(operation)
+ else:
+ self.add(operation)
+ if operation.id in self._nested_operations:
+ self.extend(self._nested_operations.pop(operation.id))
+ return operation
+
+
+def create_project(
+ project_name,
+ project_code,
+ library_project=False,
+ preset_name=None,
+ con=None
+):
+ """Create project using OpenPype settings.
+
+ This project creation function does not validate the project document on
+ creation. The project document is created blindly with only the minimum
+ required information about the project: its name, code, type and schema.
+
+ The entered project name must be unique and the project must not exist yet.
+
+ Note:
+ This function is here to be OP v4 ready, but in v3 it has more logic
+ to do. That's why inner imports are in the body.
+
+ Args:
+ project_name (str): New project name. Should be unique.
+ project_code (str): Project's code should be unique too.
+ library_project (bool): Project is a library project.
+ preset_name (str): Name of anatomy preset. Default is used if not
+ passed.
+ con (ServerAPI): Connection to server with logged user.
+
+ Raises:
+ ValueError: When project name already exists on server.
+
+ Returns:
+ dict: Created project document.
+ """
+
+ if con is None:
+ con = get_server_api_connection()
+
+ return con.create_project(
+ project_name,
+ project_code,
+ library_project,
+ preset_name
+ )
+
+
+def delete_project(project_name, con=None):
+ """Delete project from server."""
+
+ if con is None:
+ con = get_server_api_connection()
+
+ return con.delete_project(project_name)
+
+
+def create_thumbnail(project_name, src_filepath, thumbnail_id=None, con=None):
+ """Create thumbnail entity on server from a source image file."""
+
+ if con is None:
+ con = get_server_api_connection()
+ return con.create_thumbnail(project_name, src_filepath, thumbnail_id)
diff --git a/openpype/client/server/thumbnails.py b/openpype/client/server/thumbnails.py
new file mode 100644
index 0000000000..dc649b9651
--- /dev/null
+++ b/openpype/client/server/thumbnails.py
@@ -0,0 +1,229 @@
+"""Cache of thumbnails downloaded from AYON server.
+
+Thumbnails are cached via appdirs into a predefined directory.
+
+This should be moved to the thumbnails logic in pipeline, but because it
+would overflow OpenPype logic it's here for now.
+"""
+
+import os
+import time
+import collections
+
+import appdirs
+
+FileInfo = collections.namedtuple(
+ "FileInfo",
+ ("path", "size", "modification_time")
+)
+
+
+class AYONThumbnailCache:
+ """Cache of thumbnails on local storage.
+
+ Thumbnails are cached via appdirs into a predefined directory. Each
+ project has its own subfolder with thumbnails -> that's because each
+ project has its own thumbnail id validation, and file names are
+ thumbnail ids with a matching extension. Extensions are predefined
+ (.png and .jpeg).
+
+ Cache has a cleanup mechanism which is triggered on initialization by
+ default.
+
+ The cleanup has 2 levels:
+ 1. soft cleanup which removes all files that are older than 'days_alive'
+ 2. max size cleanup which removes files until the thumbnails folder
+ contains less than 'max_filesize'
+ - this is time consuming so it's not triggered automatically
+
+ Args:
+ cleanup (bool): Trigger soft cleanup (cleanup expired thumbnails).
+ """
+
+ # Lifetime of thumbnails (in days)
+ # - default 3 days
+ days_alive = 3
+ # Max size of thumbnail directory (in bytes)
+ # - default 2 GiB
+ max_filesize = 2 * 1024 * 1024 * 1024
+
+ def __init__(self, cleanup=True):
+ self._thumbnails_dir = None
+ self._days_alive_secs = self.days_alive * 24 * 60 * 60
+ if cleanup:
+ self.cleanup()
+
+ def get_thumbnails_dir(self):
+ """Root directory where thumbnails are stored.
+
+ Returns:
+ str: Path to thumbnails root.
+ """
+
+ if self._thumbnails_dir is None:
+ # TODO use generic function
+ directory = appdirs.user_data_dir("AYON", "Ynput")
+ self._thumbnails_dir = os.path.join(directory, "thumbnails")
+ return self._thumbnails_dir
+
+ thumbnails_dir = property(get_thumbnails_dir)
+
+ def get_thumbnails_dir_file_info(self):
+ """Get information about all files in thumbnails directory.
+
+ Returns:
+ List[FileInfo]: List of file information about all files.
+ """
+
+ thumbnails_dir = self.thumbnails_dir
+ files_info = []
+ if not os.path.exists(thumbnails_dir):
+ return files_info
+
+ for root, _, filenames in os.walk(thumbnails_dir):
+ for filename in filenames:
+ path = os.path.join(root, filename)
+ files_info.append(FileInfo(
+ path, os.path.getsize(path), os.path.getmtime(path)
+ ))
+ return files_info
+
+ def get_thumbnails_dir_size(self, files_info=None):
+ """Got full size of thumbnail directory.
+
+ Args:
+ files_info (List[FileInfo]): Prepared file information about
+ files in thumbnail directory.
+
+ Returns:
+ int: File size of all files in thumbnail directory.
+ """
+
+ if files_info is None:
+ files_info = self.get_thumbnails_dir_file_info()
+
+ if not files_info:
+ return 0
+
+ return sum(
+ file_info.size
+ for file_info in files_info
+ )
+
+ def cleanup(self, check_max_size=False):
+ """Cleanup thumbnails directory.
+
+ Args:
+ check_max_size (bool): Also cleanup files to match max size of
+ thumbnails directory.
+ """
+
+ thumbnails_dir = self.get_thumbnails_dir()
+ # Skip if thumbnails dir does not exist yet
+ if not os.path.exists(thumbnails_dir):
+ return
+
+ self._soft_cleanup(thumbnails_dir)
+ if check_max_size:
+ self._max_size_cleanup(thumbnails_dir)
+
+ def _soft_cleanup(self, thumbnails_dir):
+ current_time = time.time()
+ for root, _, filenames in os.walk(thumbnails_dir):
+ for filename in filenames:
+ path = os.path.join(root, filename)
+ modification_time = os.path.getmtime(path)
+ if current_time - modification_time > self._days_alive_secs:
+ os.remove(path)
+
+ def _max_size_cleanup(self, thumbnails_dir):
+ files_info = self.get_thumbnails_dir_file_info()
+ size = self.get_thumbnails_dir_size(files_info)
+ if size < self.max_filesize:
+ return
+
+ sorted_file_info = collections.deque(
+ sorted(files_info, key=lambda item: item.modification_time)
+ )
+ diff = size - self.max_filesize
+ while diff > 0:
+ if not sorted_file_info:
+ break
+
+ file_info = sorted_file_info.popleft()
+ diff -= file_info.size
+ os.remove(file_info.path)
+
+ def get_thumbnail_filepath(self, project_name, thumbnail_id):
+ """Get thumbnail by thumbnail id.
+
+ Args:
+ project_name (str): Name of project.
+ thumbnail_id (str): Thumbnail id.
+
+ Returns:
+ Union[str, None]: Path to thumbnail image or None if thumbnail
+ is not cached yet.
+ """
+
+ if not thumbnail_id:
+ return None
+
+ for ext in (
+ ".png",
+ ".jpeg",
+ ):
+ filepath = os.path.join(
+ self.thumbnails_dir, project_name, thumbnail_id + ext
+ )
+ if os.path.exists(filepath):
+ return filepath
+ return None
+
+ def get_project_dir(self, project_name):
+ """Path to root directory for specific project.
+
+ Args:
+ project_name (str): Name of project for which root directory path
+ should be returned.
+
+ Returns:
+ str: Path to root of project's thumbnails.
+ """
+
+ return os.path.join(self.thumbnails_dir, project_name)
+
+ def make_sure_project_dir_exists(self, project_name):
+ project_dir = self.get_project_dir(project_name)
+ if not os.path.exists(project_dir):
+ os.makedirs(project_dir)
+ return project_dir
+
+ def store_thumbnail(self, project_name, thumbnail_id, content, mime_type):
+ """Store thumbnail to cache folder.
+
+ Args:
+ project_name (str): Project the thumbnail belongs to.
+ thumbnail_id (str): Id of thumbnail.
+ content (bytes): Byte content of thumbnail file.
+ mime_type (str): Mime type of the content (e.g. 'image/png').
+
+ Returns:
+ str: Path to cached thumbnail image file.
+ """
+
+ if mime_type == "image/png":
+ ext = ".png"
+ elif mime_type == "image/jpeg":
+ ext = ".jpeg"
+ else:
+ raise ValueError(
+ "Unknown mime type for thumbnail \"{}\"".format(mime_type))
+
+ project_dir = self.make_sure_project_dir_exists(project_name)
+ thumbnail_path = os.path.join(project_dir, thumbnail_id + ext)
+ with open(thumbnail_path, "wb") as stream:
+ stream.write(content)
+
+ current_time = time.time()
+ os.utime(thumbnail_path, (current_time, current_time))
+
+ return thumbnail_path
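
A short usage sketch of the cache follows; the thumbnail id and byte content are illustrative placeholders.

```python
# Minimal sketch; thumbnail id and content bytes are illustrative.
from openpype.client.server.thumbnails import AYONThumbnailCache

cache = AYONThumbnailCache()  # soft cleanup of expired files runs here

thumbnail_id = "0123456789abcdef0123456789abcdef"
path = cache.get_thumbnail_filepath("my_project", thumbnail_id)
if path is None:
    # Content would normally be downloaded from the AYON server first
    content = b"<png bytes>"
    path = cache.store_thumbnail(
        "my_project", thumbnail_id, content, "image/png"
    )

# Optionally also enforce the max size limit (walks all cached files)
cache.cleanup(check_max_size=True)
```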
diff --git a/openpype/client/server/utils.py b/openpype/client/server/utils.py
new file mode 100644
index 0000000000..ed128cfad9
--- /dev/null
+++ b/openpype/client/server/utils.py
@@ -0,0 +1,109 @@
+import uuid
+
+from openpype.client.operations_base import REMOVED_VALUE
+
+
+def create_entity_id():
+ return uuid.uuid1().hex
+
+
+def prepare_attribute_changes(old_entity, new_entity, replace=False):
+ """Prepare changes of attributes on entities.
+
+ Compare 'attrib' of old and new entity data to prepare only changed
+ values that should be sent to server for update.
+
+ Example:
+ >>> # Limited entity data to 'attrib'
+ >>> old_entity = {
+ ... "attrib": {"attr_1": 1, "attr_2": "MyString", "attr_3": True}
+ ... }
+ >>> new_entity = {
+ ... "attrib": {"attr_1": 2, "attr_3": True, "attr_4": 3}
+ ... }
+ >>> # Changes if replacement should not happen
+ >>> expected_changes = {
+ ... "attr_1": 2,
+ ... "attr_4": 3
+ ... }
+ >>> changes = prepare_attribute_changes(old_entity, new_entity)
+ >>> changes == expected_changes
+ True
+
+ >>> # Changes if replacement should happen
+ >>> expected_changes_replace = {
+ ... "attr_1": 2,
+ ... "attr_2": REMOVED_VALUE,
+ ... "attr_4": 3
+ ... }
+ >>> changes_replace = prepare_attribute_changes(
+ ... old_entity, new_entity, True)
+ >>> changes_replace == expected_changes_replace
+ True
+
+ Args:
+ old_entity (dict[str, Any]): Data of entity queried from server.
+ new_entity (dict[str, Any]): Entity data with applied changes.
+ replace (bool): New entity should fully replace all old entity values.
+
+ Returns:
+ Dict[str, Any]: Values from new entity only if value has changed.
+ """
+
+ attrib_changes = {}
+ new_attrib = new_entity.get("attrib")
+ old_attrib = old_entity.get("attrib")
+ if new_attrib is None:
+ if not replace:
+ return attrib_changes
+ new_attrib = {}
+
+ if old_attrib is None:
+ return new_attrib
+
+ for attr, new_attr_value in new_attrib.items():
+ old_attr_value = old_attrib.get(attr)
+ if old_attr_value != new_attr_value:
+ attrib_changes[attr] = new_attr_value
+
+ if replace:
+ for attr in old_attrib:
+ if attr not in new_attrib:
+ attrib_changes[attr] = REMOVED_VALUE
+
+ return attrib_changes
+
+
+def prepare_entity_changes(old_entity, new_entity, replace=False):
+ """Prepare changes of AYON entities.
+
+ Compare old and new entity to filter values from new data that changed.
+
+ Args:
+ old_entity (dict[str, Any]): Data of entity queried from server.
+ new_entity (dict[str, Any]): Entity data with applied changes.
+ replace (bool): All attributes should be replaced by new values, so
+ all attribute values that are not on the new entity will be removed.
+
+ Returns:
+ Dict[str, Any]: Only values from new entity that changed.
+ """
+
+ changes = {}
+ for key, new_value in new_entity.items():
+ if key == "attrib":
+ continue
+
+ old_value = old_entity.get(key)
+ if old_value != new_value:
+ changes[key] = new_value
+
+ if replace:
+ for key in old_entity:
+ if key not in new_entity:
+ changes[key] = REMOVED_VALUE
+
+ attr_changes = prepare_attribute_changes(old_entity, new_entity, replace)
+ if attr_changes:
+ changes["attrib"] = attr_changes
+ return changes
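
A worked example of what 'prepare_entity_changes' returns; the entity data is illustrative.

```python
# Only changed values end up in the result; 'attrib' is compared separately.
from openpype.client.server.utils import prepare_entity_changes

old_entity = {
    "name": "sh010",
    "status": "In progress",
    "attrib": {"frameStart": 1001, "frameEnd": 1099},
}
new_entity = {
    "name": "sh010",
    "status": "Approved",
    "attrib": {"frameStart": 1001, "frameEnd": 1100},
}

changes = prepare_entity_changes(old_entity, new_entity)
# {"status": "Approved", "attrib": {"frameEnd": 1100}}
```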
diff --git a/openpype/hooks/pre_add_last_workfile_arg.py b/openpype/hooks/pre_add_last_workfile_arg.py
index c54acbc203..1418bc210b 100644
--- a/openpype/hooks/pre_add_last_workfile_arg.py
+++ b/openpype/hooks/pre_add_last_workfile_arg.py
@@ -1,6 +1,6 @@
import os
-from openpype.lib import PreLaunchHook
+from openpype.lib.applications import PreLaunchHook, LaunchTypes
class AddLastWorkfileToLaunchArgs(PreLaunchHook):
@@ -13,8 +13,8 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
# Execute after workfile template copy
order = 10
- app_groups = [
- "3dsmax",
+ app_groups = {
+ "3dsmax", "adsk_3dsmax",
"maya",
"nuke",
"nukex",
@@ -26,8 +26,9 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
"photoshop",
"tvpaint",
"substancepainter",
- "aftereffects"
- ]
+ "aftereffects",
+ }
+ launch_types = {LaunchTypes.local}
def execute(self):
if not self.data.get("start_last_workfile"):
diff --git a/openpype/hooks/pre_copy_template_workfile.py b/openpype/hooks/pre_copy_template_workfile.py
index 70c549919f..2203ff4396 100644
--- a/openpype/hooks/pre_copy_template_workfile.py
+++ b/openpype/hooks/pre_copy_template_workfile.py
@@ -1,7 +1,7 @@
import os
import shutil
-from openpype.lib import PreLaunchHook
from openpype.settings import get_project_settings
+from openpype.lib.applications import PreLaunchHook, LaunchTypes
from openpype.pipeline.workfile import (
get_custom_workfile_template,
get_custom_workfile_template_by_string_context
@@ -19,7 +19,8 @@ class CopyTemplateWorkfile(PreLaunchHook):
# Before `AddLastWorkfileToLaunchArgs`
order = 0
- app_groups = ["blender", "photoshop", "tvpaint", "aftereffects"]
+ app_groups = {"blender", "photoshop", "tvpaint", "aftereffects"}
+ launch_types = {LaunchTypes.local}
def execute(self):
"""Check if can copy template for context and do it if possible.
diff --git a/openpype/hooks/pre_create_extra_workdir_folders.py b/openpype/hooks/pre_create_extra_workdir_folders.py
index 8856281120..4c9d08b375 100644
--- a/openpype/hooks/pre_create_extra_workdir_folders.py
+++ b/openpype/hooks/pre_create_extra_workdir_folders.py
@@ -1,5 +1,5 @@
import os
-from openpype.lib import PreLaunchHook
+from openpype.lib.applications import PreLaunchHook, LaunchTypes
from openpype.pipeline.workfile import create_workdir_extra_folders
@@ -14,6 +14,7 @@ class CreateWorkdirExtraFolders(PreLaunchHook):
# Execute after workfile template copy
order = 15
+ launch_types = {LaunchTypes.local}
def execute(self):
if not self.application.is_host:
diff --git a/openpype/hooks/pre_global_host_data.py b/openpype/hooks/pre_global_host_data.py
index 8a178915fb..813df24af0 100644
--- a/openpype/hooks/pre_global_host_data.py
+++ b/openpype/hooks/pre_global_host_data.py
@@ -1,15 +1,16 @@
from openpype.client import get_project, get_asset_by_name
-from openpype.lib import (
+from openpype.lib.applications import (
PreLaunchHook,
EnvironmentPrepData,
prepare_app_environments,
prepare_context_environments
)
-from openpype.pipeline import AvalonMongoDB, Anatomy
+from openpype.pipeline import Anatomy
class GlobalHostDataHook(PreLaunchHook):
order = -100
+ launch_types = set()
def execute(self):
"""Prepare global objects to `data` that will be used for sure."""
@@ -26,7 +27,6 @@ class GlobalHostDataHook(PreLaunchHook):
"app": app,
- "dbcon": self.data["dbcon"],
"project_doc": self.data["project_doc"],
"asset_doc": self.data["asset_doc"],
@@ -62,13 +62,6 @@ class GlobalHostDataHook(PreLaunchHook):
# Anatomy
self.data["anatomy"] = Anatomy(project_name)
- # Mongo connection
- dbcon = AvalonMongoDB()
- dbcon.Session["AVALON_PROJECT"] = project_name
- dbcon.install()
-
- self.data["dbcon"] = dbcon
-
# Project document
project_doc = get_project(project_name)
self.data["project_doc"] = project_doc
diff --git a/openpype/hooks/pre_mac_launch.py b/openpype/hooks/pre_mac_launch.py
index f85557a4f0..402e9a5517 100644
--- a/openpype/hooks/pre_mac_launch.py
+++ b/openpype/hooks/pre_mac_launch.py
@@ -1,5 +1,5 @@
import os
-from openpype.lib import PreLaunchHook
+from openpype.lib.applications import PreLaunchHook, LaunchTypes
class LaunchWithTerminal(PreLaunchHook):
@@ -12,7 +12,8 @@ class LaunchWithTerminal(PreLaunchHook):
"""
order = 1000
- platforms = ["darwin"]
+ platforms = {"darwin"}
+ launch_types = {LaunchTypes.local}
def execute(self):
executable = str(self.launch_context.executable)
diff --git a/openpype/hooks/pre_foundry_apps.py b/openpype/hooks/pre_new_console_apps.py
similarity index 71%
rename from openpype/hooks/pre_foundry_apps.py
rename to openpype/hooks/pre_new_console_apps.py
index 21ec8e7881..9727b4fb78 100644
--- a/openpype/hooks/pre_foundry_apps.py
+++ b/openpype/hooks/pre_new_console_apps.py
@@ -1,8 +1,8 @@
import subprocess
-from openpype.lib import PreLaunchHook
+from openpype.lib.applications import PreLaunchHook, LaunchTypes
-class LaunchFoundryAppsWindows(PreLaunchHook):
+class LaunchNewConsoleApps(PreLaunchHook):
"""Foundry applications have specific way how to launch them.
Nuke is executed "like" python process so it is required to pass
@@ -13,12 +13,15 @@ class LaunchFoundryAppsWindows(PreLaunchHook):
# Should be as last hook because must change launch arguments to string
order = 1000
- app_groups = ["nuke", "nukeassist", "nukex", "hiero", "nukestudio"]
- platforms = ["windows"]
+ app_groups = {
+ "nuke", "nukeassist", "nukex", "hiero", "nukestudio", "mayapy"
+ }
+ platforms = {"windows"}
+ launch_types = {LaunchTypes.local}
def execute(self):
# Change `creationflags` to CREATE_NEW_CONSOLE
- # - on Windows nuke will create new window using its console
+ # - on Windows some apps will create a new window using their console
# Set `stdout` and `stderr` to None so new created console does not
# have redirected output to DEVNULL in build
self.launch_context.kwargs.update({
diff --git a/openpype/hooks/pre_non_python_host_launch.py b/openpype/hooks/pre_non_python_host_launch.py
index 043cb3c7f6..d9e912c826 100644
--- a/openpype/hooks/pre_non_python_host_launch.py
+++ b/openpype/hooks/pre_non_python_host_launch.py
@@ -1,10 +1,11 @@
import os
-from openpype.lib import (
+from openpype.lib import get_openpype_execute_args
+from openpype.lib.applications import (
+ get_non_python_host_kwargs,
PreLaunchHook,
- get_openpype_execute_args
+ LaunchTypes,
)
-from openpype.lib.applications import get_non_python_host_kwargs
from openpype import PACKAGE_DIR as OPENPYPE_DIR
@@ -16,9 +17,10 @@ class NonPythonHostHook(PreLaunchHook):
python script which launch the host. For these cases it is necessary to
prepend python (or openpype) executable and script path before application's.
"""
- app_groups = ["harmony", "photoshop", "aftereffects"]
+ app_groups = {"harmony", "photoshop", "aftereffects"}
order = 20
+ launch_types = {LaunchTypes.local}
def execute(self):
# Pop executable
@@ -54,4 +56,3 @@ class NonPythonHostHook(PreLaunchHook):
self.launch_context.kwargs = \
get_non_python_host_kwargs(self.launch_context.kwargs)
-
diff --git a/openpype/hooks/pre_ocio_hook.py b/openpype/hooks/pre_ocio_hook.py
index 8f462665bc..e695cf3fe8 100644
--- a/openpype/hooks/pre_ocio_hook.py
+++ b/openpype/hooks/pre_ocio_hook.py
@@ -1,8 +1,6 @@
-from openpype.lib import PreLaunchHook
+from openpype.lib.applications import PreLaunchHook
-from openpype.pipeline.colorspace import (
- get_imageio_config
-)
+from openpype.pipeline.colorspace import get_imageio_config
from openpype.pipeline.template_data import get_template_data_with_names
@@ -10,18 +8,19 @@ class OCIOEnvHook(PreLaunchHook):
"""Set OCIO environment variable for hosts that use OpenColorIO."""
order = 0
- hosts = [
+ hosts = {
"substancepainter",
"fusion",
"blender",
"aftereffects",
- "max",
+ "3dsmax",
"houdini",
"maya",
"nuke",
"hiero",
- "resolve"
- ]
+ "resolve",
+ }
+ launch_types = set()
def execute(self):
"""Hook entry method."""
@@ -39,12 +38,16 @@ class OCIOEnvHook(PreLaunchHook):
host_name=self.host_name,
project_settings=self.data["project_settings"],
anatomy_data=template_data,
- anatomy=self.data["anatomy"]
+ anatomy=self.data["anatomy"],
+ env=self.launch_context.env,
)
if config_data:
ocio_path = config_data["path"]
+ if self.host_name in ["nuke", "hiero"]:
+ ocio_path = ocio_path.replace("\\", "/")
+
self.log.info(
f"Setting OCIO environment to config path: {ocio_path}")
diff --git a/openpype/host/dirmap.py b/openpype/host/dirmap.py
index 42bf80ecec..96a98e808e 100644
--- a/openpype/host/dirmap.py
+++ b/openpype/host/dirmap.py
@@ -32,19 +32,26 @@ class HostDirmap(object):
"""
def __init__(
- self, host_name, project_name, project_settings=None, sync_module=None
+ self,
+ host_name,
+ project_name,
+ project_settings=None,
+ sync_module=None
):
self.host_name = host_name
self.project_name = project_name
self._project_settings = project_settings
- self._sync_module = sync_module # to limit reinit of Modules
+ self._sync_module = sync_module
+ # to limit reinit of Modules
+ self._sync_module_discovered = sync_module is not None
self._log = None
@property
def sync_module(self):
- if self._sync_module is None:
+ if not self._sync_module_discovered:
+ self._sync_module_discovered = True
manager = ModulesManager()
- self._sync_module = manager["sync_server"]
+ self._sync_module = manager.get("sync_server")
return self._sync_module
@property
@@ -149,23 +156,27 @@ class HostDirmap(object):
Returns:
dict : { "source-path": [XXX], "destination-path": [YYYY]}
"""
- project_name = os.getenv("AVALON_PROJECT")
+ project_name = self.project_name
+ sync_module = self.sync_module
mapping = {}
- if (not self.sync_module.enabled or
- project_name not in self.sync_module.get_enabled_projects()):
+ if (
+ sync_module is None
+ or not sync_module.enabled
+ or project_name not in sync_module.get_enabled_projects()
+ ):
return mapping
- active_site = self.sync_module.get_local_normalized_site(
- self.sync_module.get_active_site(project_name))
- remote_site = self.sync_module.get_local_normalized_site(
- self.sync_module.get_remote_site(project_name))
+ active_site = sync_module.get_local_normalized_site(
+ sync_module.get_active_site(project_name))
+ remote_site = sync_module.get_local_normalized_site(
+ sync_module.get_remote_site(project_name))
self.log.debug(
"active {} - remote {}".format(active_site, remote_site)
)
if active_site == "local" and active_site != remote_site:
- sync_settings = self.sync_module.get_sync_project_setting(
+ sync_settings = sync_module.get_sync_project_setting(
project_name,
exclude_locals=False,
cached=False)
@@ -179,7 +190,7 @@ class HostDirmap(object):
self.log.debug("remote overrides {}".format(remote_overrides))
current_platform = platform.system().lower()
- remote_provider = self.sync_module.get_provider_for_site(
+ remote_provider = sync_module.get_provider_for_site(
project_name, remote_site
)
# dirmap has sense only with regular disk provider, in the workfile
diff --git a/openpype/hosts/aftereffects/api/extension.zxp b/openpype/hosts/aftereffects/api/extension.zxp
index 358e9740d3..933dc7dc6c 100644
Binary files a/openpype/hosts/aftereffects/api/extension.zxp and b/openpype/hosts/aftereffects/api/extension.zxp differ
diff --git a/openpype/hosts/aftereffects/api/extension/CSXS/manifest.xml b/openpype/hosts/aftereffects/api/extension/CSXS/manifest.xml
index 0057758320..7329a9e723 100644
--- a/openpype/hosts/aftereffects/api/extension/CSXS/manifest.xml
+++ b/openpype/hosts/aftereffects/api/extension/CSXS/manifest.xml
@@ -1,5 +1,5 @@
-
@@ -10,22 +10,22 @@
-
+
-
+
-
-
+
+
-
+
-
-
+
+
-
+
@@ -63,7 +63,7 @@
550
400
-->
-
+
./icons/iconNormal.png
@@ -71,9 +71,9 @@
./icons/iconDisabled.png
./icons/iconDarkNormal.png
./icons/iconDarkRollover.png
-
+
-
\ No newline at end of file
+
diff --git a/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx b/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx
index bc443930df..c00844e637 100644
--- a/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx
+++ b/openpype/hosts/aftereffects/api/extension/jsx/hostscript.jsx
@@ -215,6 +215,8 @@ function _getItem(item, comps, folders, footages){
* Refactor
*/
var item_type = '';
+ var path = '';
+ var containing_comps = [];
if (item instanceof FolderItem){
item_type = 'folder';
if (!folders){
@@ -222,10 +224,18 @@ function _getItem(item, comps, folders, footages){
}
}
if (item instanceof FootageItem){
- item_type = 'footage';
if (!footages){
return "{}";
}
+ item_type = 'footage';
+ if (item.file){
+ path = item.file.fsName;
+ }
+ if (item.usedIn){
+ for (j = 0; j < item.usedIn.length; ++j){
+ containing_comps.push(item.usedIn[j].id);
+ }
+ }
}
if (item instanceof CompItem){
item_type = 'comp';
@@ -236,7 +246,9 @@ function _getItem(item, comps, folders, footages){
var item = {"name": item.name,
"id": item.id,
- "type": item_type};
+ "type": item_type,
+ "path": path,
+ "containing_comps": containing_comps};
return JSON.stringify(item);
}
diff --git a/openpype/hosts/aftereffects/api/launch_logic.py b/openpype/hosts/aftereffects/api/launch_logic.py
index ea71122042..e90c3dc5b8 100644
--- a/openpype/hosts/aftereffects/api/launch_logic.py
+++ b/openpype/hosts/aftereffects/api/launch_logic.py
@@ -13,13 +13,13 @@ from wsrpc_aiohttp import (
WebSocketAsync
)
-from qtpy import QtCore, QtWidgets
+from qtpy import QtCore
from openpype.lib import Logger
-from openpype.tools.utils import host_tools
from openpype.tests.lib import is_in_tests
from openpype.pipeline import install_host, legacy_io
from openpype.modules import ModulesManager
+from openpype.tools.utils import host_tools, get_openpype_qt_app
from openpype.tools.adobe_webserver.app import WebServerTool
from .ws_stub import get_stub
@@ -43,7 +43,7 @@ def main(*subprocess_args):
install_host(host)
os.environ["OPENPYPE_LOG_NO_COLORS"] = "False"
- app = QtWidgets.QApplication([])
+ app = get_openpype_qt_app()
app.setQuitOnLastWindowClosed(False)
launcher = ProcessLauncher(subprocess_args)
diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py
index 5566ca9e5b..8fc7a70dd8 100644
--- a/openpype/hosts/aftereffects/api/pipeline.py
+++ b/openpype/hosts/aftereffects/api/pipeline.py
@@ -23,6 +23,7 @@ from openpype.host import (
ILoadHost,
IPublishHost
)
+from openpype.tools.utils import get_openpype_qt_app
from .launch_logic import get_stub
from .ws_stub import ConnectionNotEstablishedYet
@@ -236,10 +237,7 @@ def check_inventory():
return
# Warn about outdated containers.
- _app = QtWidgets.QApplication.instance()
- if not _app:
- print("Starting new QApplication..")
- _app = QtWidgets.QApplication([])
+ _app = get_openpype_qt_app()
message_box = QtWidgets.QMessageBox()
message_box.setIcon(QtWidgets.QMessageBox.Warning)
diff --git a/openpype/hosts/aftereffects/api/ws_stub.py b/openpype/hosts/aftereffects/api/ws_stub.py
index f5b96fa63a..18f530e272 100644
--- a/openpype/hosts/aftereffects/api/ws_stub.py
+++ b/openpype/hosts/aftereffects/api/ws_stub.py
@@ -37,6 +37,9 @@ class AEItem(object):
height = attr.ib(default=None)
is_placeholder = attr.ib(default=False)
uuid = attr.ib(default=False)
+ path = attr.ib(default=False) # path to FootageItem to validate
+ # list of compositions the Footage is in
+ containing_comps = attr.ib(factory=list)
class AfterEffectsServerStub():
@@ -704,7 +707,10 @@ class AfterEffectsServerStub():
d.get("instance_id"),
d.get("width"),
d.get("height"),
- d.get("is_placeholder"))
+ d.get("is_placeholder"),
+ d.get("uuid"),
+ d.get("path"),
+ d.get("containing_comps"),)
ret.append(item)
return ret
diff --git a/openpype/hosts/aftereffects/plugins/create/create_render.py b/openpype/hosts/aftereffects/plugins/create/create_render.py
index fa79fac78f..fbe600ae68 100644
--- a/openpype/hosts/aftereffects/plugins/create/create_render.py
+++ b/openpype/hosts/aftereffects/plugins/create/create_render.py
@@ -28,7 +28,6 @@ class RenderCreator(Creator):
create_allow_context_change = True
# Settings
- default_variants = []
mark_for_review = True
def create(self, subset_name_from_ui, data, pre_create_data):
@@ -165,12 +164,16 @@ class RenderCreator(Creator):
api.get_stub().rename_item(comp_id,
new_comp_name)
- def apply_settings(self, project_settings, system_settings):
+ def apply_settings(self, project_settings):
plugin_settings = (
project_settings["aftereffects"]["create"]["RenderCreator"]
)
self.mark_for_review = plugin_settings["mark_for_review"]
+ self.default_variants = plugin_settings.get(
+ "default_variants",
+ plugin_settings.get("defaults") or []
+ )
def get_detail_description(self):
return """Creator for Render instances
diff --git a/openpype/hosts/aftereffects/plugins/load/load_background.py b/openpype/hosts/aftereffects/plugins/load/load_background.py
index e7c29fee5a..16f45074aa 100644
--- a/openpype/hosts/aftereffects/plugins/load/load_background.py
+++ b/openpype/hosts/aftereffects/plugins/load/load_background.py
@@ -33,9 +33,10 @@ class BackgroundLoader(api.AfterEffectsLoader):
existing_items,
"{}_{}".format(context["asset"]["name"], name))
- layers = get_background_layers(self.fname)
+ path = self.filepath_from_context(context)
+ layers = get_background_layers(path)
if not layers:
- raise ValueError("No layers found in {}".format(self.fname))
+ raise ValueError("No layers found in {}".format(path))
comp = stub.import_background(None, stub.LOADED_ICON + comp_name,
layers)
diff --git a/openpype/hosts/aftereffects/plugins/load/load_file.py b/openpype/hosts/aftereffects/plugins/load/load_file.py
index 33a86aa505..8d52aac546 100644
--- a/openpype/hosts/aftereffects/plugins/load/load_file.py
+++ b/openpype/hosts/aftereffects/plugins/load/load_file.py
@@ -29,32 +29,27 @@ class FileLoader(api.AfterEffectsLoader):
import_options = {}
- file = self.fname
+ path = self.filepath_from_context(context)
- repr_cont = context["representation"]["context"]
- if "#" not in file:
- frame = repr_cont.get("frame")
- if frame:
- padding = len(frame)
- file = file.replace(frame, "#" * padding)
- import_options['sequence'] = True
+ if len(context["representation"]["files"]) > 1:
+ import_options['sequence'] = True
- if not file:
+ if not path:
repr_id = context["representation"]["_id"]
self.log.warning(
"Representation id `{}` is failing to load".format(repr_id))
return
- file = file.replace("\\", "/")
- if '.psd' in file:
+ path = path.replace("\\", "/")
+ if '.psd' in path:
import_options['ImportAsType'] = 'ImportAsType.COMP'
- comp = stub.import_file(self.fname, stub.LOADED_ICON + comp_name,
+ comp = stub.import_file(path, stub.LOADED_ICON + comp_name,
import_options)
if not comp:
self.log.warning(
- "Representation id `{}` is failing to load".format(file))
+ "Representation `{}` is failing to load".format(path))
self.log.warning("Check host app for alert error.")
return
diff --git a/openpype/hosts/aftereffects/plugins/publish/closeAE.py b/openpype/hosts/aftereffects/plugins/publish/closeAE.py
index eff2573e8f..0be20d9f05 100644
--- a/openpype/hosts/aftereffects/plugins/publish/closeAE.py
+++ b/openpype/hosts/aftereffects/plugins/publish/closeAE.py
@@ -15,7 +15,7 @@ class CloseAE(pyblish.api.ContextPlugin):
active = True
hosts = ["aftereffects"]
- targets = ["remotepublish"]
+ targets = ["automated"]
def process(self, context):
self.log.info("CloseAE")
diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_render.py b/openpype/hosts/aftereffects/plugins/publish/collect_render.py
index aa46461915..49874d6cff 100644
--- a/openpype/hosts/aftereffects/plugins/publish/collect_render.py
+++ b/openpype/hosts/aftereffects/plugins/publish/collect_render.py
@@ -138,7 +138,6 @@ class CollectAERender(publish.AbstractCollectRender):
fam = "render.farm"
if fam not in instance.families:
instance.families.append(fam)
- instance.toBeRenderedOn = "deadline"
instance.renderer = "aerender"
instance.farm = True # to skip integrate
if "review" in instance.families:
diff --git a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py
index c21c3623c3..dc557f67fc 100644
--- a/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py
+++ b/openpype/hosts/aftereffects/plugins/publish/collect_workfile.py
@@ -1,7 +1,6 @@
import os
import pyblish.api
-from openpype.pipeline import legacy_io
from openpype.pipeline.create import get_subset_name
@@ -44,7 +43,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
instance.data["publish"] = instance.data["active"] # for DL
def _get_new_instance(self, context, scene_file):
- task = legacy_io.Session["AVALON_TASK"]
+ task = context.data["task"]
version = context.data["version"]
asset_entity = context.data["assetEntity"]
project_entity = context.data["projectEntity"]
diff --git a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py
index c70aa41dbe..bdb48e11f8 100644
--- a/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py
+++ b/openpype/hosts/aftereffects/plugins/publish/extract_local_render.py
@@ -1,11 +1,5 @@
import os
-import sys
-import six
-from openpype.lib import (
- get_ffmpeg_tool_path,
- run_subprocess,
-)
from openpype.pipeline import publish
from openpype.hosts.aftereffects.api import get_stub
diff --git a/openpype/hosts/aftereffects/plugins/publish/help/validate_footage_items.xml b/openpype/hosts/aftereffects/plugins/publish/help/validate_footage_items.xml
new file mode 100644
index 0000000000..01c8966015
--- /dev/null
+++ b/openpype/hosts/aftereffects/plugins/publish/help/validate_footage_items.xml
@@ -0,0 +1,14 @@
+
+
+
+Footage item missing
+
+## Footage item missing
+
+ FootageItem `{name}` contains a missing file `{path}`. Rendering will not produce any frames and AE will stop reacting to any integration.
+### How to repair?
+
+Remove `{name}` or provide the missing file.
+
+
+
diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_footage_items.py b/openpype/hosts/aftereffects/plugins/publish/validate_footage_items.py
new file mode 100644
index 0000000000..40a08a2c3f
--- /dev/null
+++ b/openpype/hosts/aftereffects/plugins/publish/validate_footage_items.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+"""Validate presence of footage items in composition
+Requires:
+"""
+import os
+
+import pyblish.api
+
+from openpype.pipeline import (
+ PublishXmlValidationError
+)
+from openpype.hosts.aftereffects.api import get_stub
+
+
+class ValidateFootageItems(pyblish.api.InstancePlugin):
+ """
+ Validates that FootageItems contained in the composition exist.
+
+ AE fails silently and doesn't render anything if a footage item file is
+ missing. This results in an unresponsive AE UI, as AE expects a reaction
+ from the user but does not show a dialog.
+ This validator tries to check the existence of the files.
+ It will not protect against missing frames in multi-frame footage though
+ (the AE api doesn't provide this information and it cannot easily be told
+ how many frames should be there). A missing frame is replaced by a
+ placeholder.
+ """
+
+ order = pyblish.api.ValidatorOrder
+ label = "Validate Footage Items"
+ families = ["render.farm", "render.local", "render"]
+ hosts = ["aftereffects"]
+ optional = True
+
+ def process(self, instance):
+ """Plugin entry point."""
+
+ comp_id = instance.data["comp_id"]
+ for footage_item in get_stub().get_items(comps=False, folders=False,
+ footages=True):
+ self.log.info(footage_item)
+ if comp_id not in footage_item.containing_comps:
+ continue
+
+ path = footage_item.path
+ if path and not os.path.exists(path):
+ msg = f"File {path} not found."
+ formatting = {"name": footage_item.name, "path": path}
+ raise PublishXmlValidationError(self, msg,
+ formatting_data=formatting)
diff --git a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py
index 6c36136b20..36f6035d23 100644
--- a/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py
+++ b/openpype/hosts/aftereffects/plugins/publish/validate_instance_asset.py
@@ -1,6 +1,6 @@
import pyblish.api
-from openpype.pipeline import legacy_io
+from openpype.pipeline import get_current_asset_name
from openpype.pipeline.publish import (
ValidateContentsOrder,
PublishXmlValidationError,
@@ -30,7 +30,7 @@ class ValidateInstanceAssetRepair(pyblish.api.Action):
for instance in instances:
data = stub.read(instance[0])
- data["asset"] = legacy_io.Session["AVALON_ASSET"]
+ data["asset"] = get_current_asset_name()
stub.imprint(instance[0].instance_id, data)
@@ -54,7 +54,7 @@ class ValidateInstanceAsset(pyblish.api.InstancePlugin):
def process(self, instance):
instance_asset = instance.data["asset"]
- current_asset = legacy_io.Session["AVALON_ASSET"]
+ current_asset = get_current_asset_name()
msg = (
f"Instance asset {instance_asset} is not the same "
f"as current context {current_asset}."
diff --git a/openpype/hosts/blender/api/__init__.py b/openpype/hosts/blender/api/__init__.py
index 75a11affde..e15f1193a5 100644
--- a/openpype/hosts/blender/api/__init__.py
+++ b/openpype/hosts/blender/api/__init__.py
@@ -38,6 +38,8 @@ from .lib import (
from .capture import capture
+from .render_lib import prepare_rendering
+
__all__ = [
"install",
@@ -66,4 +68,5 @@ __all__ = [
"get_selection",
"capture",
# "unique_name",
+ "prepare_rendering",
]
diff --git a/openpype/hosts/blender/api/colorspace.py b/openpype/hosts/blender/api/colorspace.py
new file mode 100644
index 0000000000..4521612b7d
--- /dev/null
+++ b/openpype/hosts/blender/api/colorspace.py
@@ -0,0 +1,51 @@
+import attr
+
+import bpy
+
+
+@attr.s
+class LayerMetadata(object):
+ """Data class for Render Layer metadata."""
+ frameStart = attr.ib()
+ frameEnd = attr.ib()
+
+
+@attr.s
+class RenderProduct(object):
+ """
+ Colorspace data carried as render product parameters when submitting
+ the publish job.
+ """
+ colorspace = attr.ib() # colorspace
+ view = attr.ib() # OCIO view transform
+ productName = attr.ib(default=None)
+
+
+class ARenderProduct(object):
+ def __init__(self):
+ """Constructor."""
+ # Initialize
+ self.layer_data = self._get_layer_data()
+ self.layer_data.products = self.get_render_products()
+
+ def _get_layer_data(self):
+ scene = bpy.context.scene
+
+ return LayerMetadata(
+ frameStart=int(scene.frame_start),
+ frameEnd=int(scene.frame_end),
+ )
+
+ def get_render_products(self):
+ """To be implemented by renderer class.
+ This should return a list of RenderProducts.
+ Returns:
+ list: List of RenderProduct
+ """
+ return [
+ RenderProduct(
+ colorspace="sRGB",
+ view="ACES 1.0",
+ productName=""
+ )
+ ]
diff --git a/openpype/hosts/blender/api/ops.py b/openpype/hosts/blender/api/ops.py
index 91cbfe524f..0eb90eeff9 100644
--- a/openpype/hosts/blender/api/ops.py
+++ b/openpype/hosts/blender/api/ops.py
@@ -16,10 +16,12 @@ import bpy
import bpy.utils.previews
from openpype import style
-from openpype.pipeline import legacy_io
+from openpype import AYON_SERVER_ENABLED
+from openpype.pipeline import get_current_asset_name, get_current_task_name
from openpype.tools.utils import host_tools
from .workio import OpenFileCacher
+from . import pipeline
PREVIEW_COLLECTIONS: Dict = dict()
@@ -283,7 +285,7 @@ class LaunchLoader(LaunchQtApp):
def before_window_show(self):
self._window.set_context(
- {"asset": legacy_io.Session["AVALON_ASSET"]},
+ {"asset": get_current_asset_name()},
refresh=True
)
@@ -330,10 +332,11 @@ class LaunchWorkFiles(LaunchQtApp):
def execute(self, context):
result = super().execute(context)
- self._window.set_context({
- "asset": legacy_io.Session["AVALON_ASSET"],
- "task": legacy_io.Session["AVALON_TASK"]
- })
+ if not AYON_SERVER_ENABLED:
+ self._window.set_context({
+ "asset": get_current_asset_name(),
+ "task": get_current_task_name()
+ })
return result
def before_window_show(self):
@@ -344,6 +347,26 @@ class LaunchWorkFiles(LaunchQtApp):
self._window.refresh()
+class SetFrameRange(bpy.types.Operator):
+ bl_idname = "wm.ayon_set_frame_range"
+ bl_label = "Set Frame Range"
+
+ def execute(self, context):
+ data = pipeline.get_asset_data()
+ pipeline.set_frame_range(data)
+ return {"FINISHED"}
+
+
+class SetResolution(bpy.types.Operator):
+ bl_idname = "wm.ayon_set_resolution"
+ bl_label = "Set Resolution"
+
+ def execute(self, context):
+ data = pipeline.get_asset_data()
+ pipeline.set_resolution(data)
+ return {"FINISHED"}
+
+
class TOPBAR_MT_avalon(bpy.types.Menu):
"""Avalon menu."""
@@ -362,8 +385,8 @@ class TOPBAR_MT_avalon(bpy.types.Menu):
else:
pyblish_menu_icon_id = 0
- asset = legacy_io.Session['AVALON_ASSET']
- task = legacy_io.Session['AVALON_TASK']
+ asset = get_current_asset_name()
+ task = get_current_task_name()
context_label = f"{asset}, {task}"
context_label_item = layout.row()
context_label_item.operator(
@@ -381,9 +404,11 @@ class TOPBAR_MT_avalon(bpy.types.Menu):
layout.operator(LaunchManager.bl_idname, text="Manage...")
layout.operator(LaunchLibrary.bl_idname, text="Library...")
layout.separator()
+ layout.operator(SetFrameRange.bl_idname, text="Set Frame Range")
+ layout.operator(SetResolution.bl_idname, text="Set Resolution")
+ layout.separator()
layout.operator(LaunchWorkFiles.bl_idname, text="Work Files...")
- # TODO (jasper): maybe add 'Reload Pipeline', 'Set Frame Range' and
- # 'Set Resolution'?
+ # TODO (jasper): maybe add 'Reload Pipeline'
def draw_avalon_menu(self, context):
@@ -399,6 +424,8 @@ classes = [
LaunchManager,
LaunchLibrary,
LaunchWorkFiles,
+ SetFrameRange,
+ SetResolution,
TOPBAR_MT_avalon,
]
@@ -411,6 +438,7 @@ def register():
pcoll.load("pyblish_menu_icon", str(pyblish_icon_file.absolute()), 'IMAGE')
PREVIEW_COLLECTIONS["avalon"] = pcoll
+ BlenderApplication.get_app()
for cls in classes:
bpy.utils.register_class(cls)
bpy.types.TOPBAR_MT_editor_menus.append(draw_avalon_menu)
diff --git a/openpype/hosts/blender/api/pipeline.py b/openpype/hosts/blender/api/pipeline.py
index 0f756d8cb6..84af0904f0 100644
--- a/openpype/hosts/blender/api/pipeline.py
+++ b/openpype/hosts/blender/api/pipeline.py
@@ -14,6 +14,8 @@ from openpype.client import get_asset_by_name
from openpype.pipeline import (
schema,
legacy_io,
+ get_current_project_name,
+ get_current_asset_name,
register_loader_plugin_path,
register_creator_plugin_path,
deregister_loader_plugin_path,
@@ -111,22 +113,21 @@ def message_window(title, message):
_process_app_events()
-def set_start_end_frames():
- project_name = legacy_io.active_project()
- asset_name = legacy_io.Session["AVALON_ASSET"]
+def get_asset_data():
+ project_name = get_current_project_name()
+ asset_name = get_current_asset_name()
asset_doc = get_asset_by_name(project_name, asset_name)
+ return asset_doc.get("data")
+
+
+def set_frame_range(data):
scene = bpy.context.scene
# Default scene settings
frameStart = scene.frame_start
frameEnd = scene.frame_end
fps = scene.render.fps / scene.render.fps_base
- resolution_x = scene.render.resolution_x
- resolution_y = scene.render.resolution_y
-
- # Check if settings are set
- data = asset_doc.get("data")
if not data:
return
@@ -137,26 +138,47 @@ def set_start_end_frames():
frameEnd = data.get("frameEnd")
if data.get("fps"):
fps = data.get("fps")
- if data.get("resolutionWidth"):
- resolution_x = data.get("resolutionWidth")
- if data.get("resolutionHeight"):
- resolution_y = data.get("resolutionHeight")
scene.frame_start = frameStart
scene.frame_end = frameEnd
scene.render.fps = round(fps)
scene.render.fps_base = round(fps) / fps
+
+
+def set_resolution(data):
+ scene = bpy.context.scene
+
+ # Default scene settings
+ resolution_x = scene.render.resolution_x
+ resolution_y = scene.render.resolution_y
+
+ if not data:
+ return
+
+ if data.get("resolutionWidth"):
+ resolution_x = data.get("resolutionWidth")
+ if data.get("resolutionHeight"):
+ resolution_y = data.get("resolutionHeight")
+
scene.render.resolution_x = resolution_x
scene.render.resolution_y = resolution_y
def on_new():
- set_start_end_frames()
-
project = os.environ.get("AVALON_PROJECT")
- settings = get_project_settings(project)
+ settings = get_project_settings(project).get("blender")
- unit_scale_settings = settings.get("blender").get("unit_scale_settings")
+ set_resolution_startup = settings.get("set_resolution_startup")
+ set_frames_startup = settings.get("set_frames_startup")
+
+ data = get_asset_data()
+
+ if set_resolution_startup:
+ set_resolution(data)
+ if set_frames_startup:
+ set_frame_range(data)
+
+ unit_scale_settings = settings.get("unit_scale_settings")
unit_scale_enabled = unit_scale_settings.get("enabled")
if unit_scale_enabled:
unit_scale = unit_scale_settings.get("base_file_unit_scale")
@@ -164,12 +186,20 @@ def on_new():
def on_open():
- set_start_end_frames()
-
project = os.environ.get("AVALON_PROJECT")
- settings = get_project_settings(project)
+ settings = get_project_settings(project).get("blender")
- unit_scale_settings = settings.get("blender").get("unit_scale_settings")
+ set_resolution_startup = settings.get("set_resolution_startup")
+ set_frames_startup = settings.get("set_frames_startup")
+
+ data = get_asset_data()
+
+ if set_resolution_startup:
+ set_resolution(data)
+ if set_frames_startup:
+ set_frame_range(data)
+
+ unit_scale_settings = settings.get("unit_scale_settings")
unit_scale_enabled = unit_scale_settings.get("enabled")
apply_on_opening = unit_scale_settings.get("apply_on_opening")
if unit_scale_enabled and apply_on_opening:
@@ -430,36 +460,6 @@ def ls() -> Iterator:
yield parse_container(container)
-def update_hierarchy(containers):
- """Hierarchical container support
-
- This is the function to support Scene Inventory to draw hierarchical
- view for containers.
-
- We need both parent and children to visualize the graph.
-
- """
-
- all_containers = set(ls()) # lookup set
-
- for container in containers:
- # Find parent
- # FIXME (jasperge): re-evaluate this. How would it be possible
- # to 'nest' assets? Collections can have several parents, for
- # now assume it has only 1 parent
- parent = [
- coll for coll in bpy.data.collections if container in coll.children
- ]
- for node in parent:
- if node in all_containers:
- container["parent"] = node
- break
-
- log.debug("Container: %s", container)
-
- yield container
-
-
def publish():
"""Shorthand to publish from within host."""
diff --git a/openpype/hosts/blender/api/plugin.py b/openpype/hosts/blender/api/plugin.py
index 1274795c6b..fb87d08cce 100644
--- a/openpype/hosts/blender/api/plugin.py
+++ b/openpype/hosts/blender/api/plugin.py
@@ -243,7 +243,8 @@ class AssetLoader(LoaderPlugin):
"""
# TODO (jasper): make it possible to add the asset several times by
# just re-using the collection
- assert Path(self.fname).exists(), f"{self.fname} doesn't exist."
+ filepath = self.filepath_from_context(context)
+ assert Path(filepath).exists(), f"{filepath} doesn't exist."
asset = context["asset"]["name"]
subset = context["subset"]["name"]
diff --git a/openpype/hosts/blender/api/render_lib.py b/openpype/hosts/blender/api/render_lib.py
new file mode 100644
index 0000000000..d564b5ebcb
--- /dev/null
+++ b/openpype/hosts/blender/api/render_lib.py
@@ -0,0 +1,255 @@
+import os
+
+import bpy
+
+from openpype.settings import get_project_settings
+from openpype.pipeline import get_current_project_name
+
+
+def get_default_render_folder(settings):
+ """Get default render folder from blender settings."""
+
+ return (settings["blender"]
+ ["RenderSettings"]
+ ["default_render_image_folder"])
+
+
+def get_aov_separator(settings):
+ """Get aov separator from blender settings."""
+
+ aov_sep = (settings["blender"]
+ ["RenderSettings"]
+ ["aov_separator"])
+
+ if aov_sep == "dash":
+ return "-"
+ elif aov_sep == "underscore":
+ return "_"
+ elif aov_sep == "dot":
+ return "."
+ else:
+ raise ValueError(f"Invalid aov separator: {aov_sep}")
+
+
+def get_image_format(settings):
+ """Get image format from blender settings."""
+
+ return (settings["blender"]
+ ["RenderSettings"]
+ ["image_format"])
+
+
+def get_multilayer(settings):
+ """Get multilayer from blender settings."""
+
+ return (settings["blender"]
+ ["RenderSettings"]
+ ["multilayer_exr"])
+
+
+def get_render_product(output_path, name, aov_sep):
+ """
+ Generate the path to the render product. Blender interprets the `#`
+ as the frame number when it renders.
+
+ Args:
+ output_path (str): Folder the render output is written to.
+ name (str): Name of the render product (the asset group name).
+ aov_sep (str): Separator between the product name and the AOV suffix.
+ """
+ filepath = os.path.join(output_path, name)
+ render_product = f"{filepath}{aov_sep}beauty.####"
+ render_product = render_product.replace("\\", "/")
+
+ return render_product
+
+
+def set_render_format(ext, multilayer):
+ # Set Blender to save the file with the right extension
+ bpy.context.scene.render.use_file_extension = True
+
+ image_settings = bpy.context.scene.render.image_settings
+
+ if ext == "exr":
+ image_settings.file_format = (
+ "OPEN_EXR_MULTILAYER" if multilayer else "OPEN_EXR")
+ elif ext == "bmp":
+ image_settings.file_format = "BMP"
+ elif ext == "rgb":
+ image_settings.file_format = "IRIS"
+ elif ext == "png":
+ image_settings.file_format = "PNG"
+ elif ext == "jpeg":
+ image_settings.file_format = "JPEG"
+ elif ext == "jp2":
+ image_settings.file_format = "JPEG2000"
+ elif ext == "tga":
+ image_settings.file_format = "TARGA"
+ elif ext == "tif":
+ image_settings.file_format = "TIFF"
+
+
+def set_render_passes(settings):
+ aov_list = (settings["blender"]
+ ["RenderSettings"]
+ ["aov_list"])
+
+ custom_passes = (settings["blender"]
+ ["RenderSettings"]
+ ["custom_passes"])
+
+ vl = bpy.context.view_layer
+
+ vl.use_pass_combined = "combined" in aov_list
+ vl.use_pass_z = "z" in aov_list
+ vl.use_pass_mist = "mist" in aov_list
+ vl.use_pass_normal = "normal" in aov_list
+ vl.use_pass_diffuse_direct = "diffuse_light" in aov_list
+ vl.use_pass_diffuse_color = "diffuse_color" in aov_list
+ vl.use_pass_glossy_direct = "specular_light" in aov_list
+ vl.use_pass_glossy_color = "specular_color" in aov_list
+ vl.eevee.use_pass_volume_direct = "volume_light" in aov_list
+ vl.use_pass_emit = "emission" in aov_list
+ vl.use_pass_environment = "environment" in aov_list
+ vl.use_pass_shadow = "shadow" in aov_list
+ vl.use_pass_ambient_occlusion = "ao" in aov_list
+
+ cycles = vl.cycles
+
+ cycles.denoising_store_passes = "denoising" in aov_list
+ cycles.use_pass_volume_direct = "volume_direct" in aov_list
+ cycles.use_pass_volume_indirect = "volume_indirect" in aov_list
+
+ aovs_names = [aov.name for aov in vl.aovs]
+ for cp in custom_passes:
+ cp_name = cp[0]
+ if cp_name not in aovs_names:
+ aov = vl.aovs.add()
+ aov.name = cp_name
+ else:
+ aov = vl.aovs[cp_name]
+ aov.type = cp[1].get("type", "VALUE")
+
+ return aov_list, custom_passes
+
+
+def set_node_tree(output_path, name, aov_sep, ext, multilayer):
+ # Set the scene to use the compositor node tree to render
+ bpy.context.scene.use_nodes = True
+
+ tree = bpy.context.scene.node_tree
+
+ # Get the Render Layers node
+ rl_node = None
+ for node in tree.nodes:
+ if node.bl_idname == "CompositorNodeRLayers":
+ rl_node = node
+ break
+
+ # If there's not a Render Layers node, we create it
+ if not rl_node:
+ rl_node = tree.nodes.new("CompositorNodeRLayers")
+
+ # Get the enabled output sockets, that are the active passes for the
+ # render.
+ # We also exclude some layers.
+ exclude_sockets = ["Image", "Alpha", "Noisy Image"]
+ passes = [
+ socket
+ for socket in rl_node.outputs
+ if socket.enabled and socket.name not in exclude_sockets
+ ]
+
+ # Remove all output nodes
+ for node in tree.nodes:
+ if node.bl_idname == "CompositorNodeOutputFile":
+ tree.nodes.remove(node)
+
+ # Create a new output node
+ output = tree.nodes.new("CompositorNodeOutputFile")
+
+ image_settings = bpy.context.scene.render.image_settings
+ output.format.file_format = image_settings.file_format
+
+ # In case of a multilayer exr, we don't need to use the output node,
+ # because the blender render already outputs a multilayer exr.
+ if ext == "exr" and multilayer:
+ output.layer_slots.clear()
+ return []
+
+ output.file_slots.clear()
+ output.base_path = output_path
+
+ aov_file_products = []
+
+ # For each active render pass, we add a new socket to the output node
+ # and link it
+ for render_pass in passes:
+ filepath = f"{name}{aov_sep}{render_pass.name}.####"
+
+ output.file_slots.new(filepath)
+
+ aov_file_products.append(
+ (render_pass.name, os.path.join(output_path, filepath)))
+
+ node_input = output.inputs[-1]
+
+ tree.links.new(render_pass, node_input)
+
+ return aov_file_products
+
+
+def imprint_render_settings(node, data):
+ RENDER_DATA = "render_data"
+ if not node.get(RENDER_DATA):
+ node[RENDER_DATA] = {}
+ for key, value in data.items():
+ if value is None:
+ continue
+ node[RENDER_DATA][key] = value
+
+
+def prepare_rendering(asset_group):
+ name = asset_group.name
+
+ filepath = bpy.data.filepath
+ assert filepath, "Workfile not saved. Please save the file first."
+
+ file_path = os.path.dirname(filepath)
+ file_name = os.path.basename(filepath)
+ file_name, _ = os.path.splitext(file_name)
+
+ project = get_current_project_name()
+ settings = get_project_settings(project)
+
+ render_folder = get_default_render_folder(settings)
+ aov_sep = get_aov_separator(settings)
+ ext = get_image_format(settings)
+ multilayer = get_multilayer(settings)
+
+ set_render_format(ext, multilayer)
+ aov_list, custom_passes = set_render_passes(settings)
+
+ output_path = os.path.join(file_path, render_folder, file_name)
+
+ render_product = get_render_product(output_path, name, aov_sep)
+ aov_file_product = set_node_tree(
+ output_path, name, aov_sep, ext, multilayer)
+
+ bpy.context.scene.render.filepath = render_product
+
+ render_settings = {
+ "render_folder": render_folder,
+ "aov_separator": aov_sep,
+ "image_format": ext,
+ "multilayer_exr": multilayer,
+ "aov_list": aov_list,
+ "custom_passes": custom_passes,
+ "render_product": render_product,
+ "aov_file_product": aov_file_product,
+ "review": True,
+ }
+
+ imprint_render_settings(asset_group, render_settings)
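
A usage sketch of the new render preparation; it has to run inside Blender with a saved workfile, and the collection name is an illustrative assumption.

```python
# Sketch, run inside Blender with a saved workfile; the collection name
# is illustrative.
import bpy

from openpype.hosts.blender.api import prepare_rendering

asset_group = bpy.data.collections["renderMain"]
prepare_rendering(asset_group)

# After the call:
# - scene.render.filepath points at '<name><aov_sep>beauty.####' inside
#   the configured render folder
# - a compositor file output exists for each enabled AOV pass (unless a
#   multilayer EXR is used)
# - the settings used are imprinted on the asset group as 'render_data'
```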
diff --git a/openpype/hosts/blender/hooks/pre_add_run_python_script_arg.py b/openpype/hosts/blender/hooks/pre_add_run_python_script_arg.py
index 559e9ae0ce..68c9bfdd57 100644
--- a/openpype/hosts/blender/hooks/pre_add_run_python_script_arg.py
+++ b/openpype/hosts/blender/hooks/pre_add_run_python_script_arg.py
@@ -1,6 +1,6 @@
from pathlib import Path
-from openpype.lib import PreLaunchHook
+from openpype.lib.applications import PreLaunchHook, LaunchTypes
class AddPythonScriptToLaunchArgs(PreLaunchHook):
@@ -8,9 +8,8 @@ class AddPythonScriptToLaunchArgs(PreLaunchHook):
# Append after file argument
order = 15
- app_groups = [
- "blender",
- ]
+ app_groups = {"blender"}
+ launch_types = {LaunchTypes.local}
def execute(self):
if not self.launch_context.data.get("python_scripts"):
diff --git a/openpype/hosts/blender/hooks/pre_pyside_install.py b/openpype/hosts/blender/hooks/pre_pyside_install.py
index e5f66d2a26..2aa3a5e49a 100644
--- a/openpype/hosts/blender/hooks/pre_pyside_install.py
+++ b/openpype/hosts/blender/hooks/pre_pyside_install.py
@@ -2,7 +2,7 @@ import os
import re
import subprocess
from platform import system
-from openpype.lib import PreLaunchHook
+from openpype.lib.applications import PreLaunchHook, LaunchTypes
class InstallPySideToBlender(PreLaunchHook):
@@ -16,7 +16,8 @@ class InstallPySideToBlender(PreLaunchHook):
blender's python packages.
"""
- app_groups = ["blender"]
+ app_groups = {"blender"}
+ launch_types = {LaunchTypes.local}
def execute(self):
# Prelaunch hook is not crucial
@@ -30,7 +31,7 @@ class InstallPySideToBlender(PreLaunchHook):
def inner_execute(self):
# Get blender's python directory
- version_regex = re.compile(r"^[2-3]\.[0-9]+$")
+ version_regex = re.compile(r"^[2-4]\.[0-9]+$")
platform = system().lower()
executable = self.launch_context.executable.executable_path
diff --git a/openpype/hosts/blender/hooks/pre_windows_console.py b/openpype/hosts/blender/hooks/pre_windows_console.py
index d6be45b225..2161b7a2f5 100644
--- a/openpype/hosts/blender/hooks/pre_windows_console.py
+++ b/openpype/hosts/blender/hooks/pre_windows_console.py
@@ -1,5 +1,5 @@
import subprocess
-from openpype.lib import PreLaunchHook
+from openpype.lib.applications import PreLaunchHook, LaunchTypes
class BlenderConsoleWindows(PreLaunchHook):
@@ -13,8 +13,9 @@ class BlenderConsoleWindows(PreLaunchHook):
# Should be as last hook because must change launch arguments to string
order = 1000
- app_groups = ["blender"]
- platforms = ["windows"]
+ app_groups = {"blender"}
+ platforms = {"windows"}
+ launch_types = {LaunchTypes.local}
def execute(self):
# Change `creationflags` to CREATE_NEW_CONSOLE
diff --git a/openpype/hosts/blender/plugins/create/create_action.py b/openpype/hosts/blender/plugins/create/create_action.py
index 54b3a501a7..0203ba74c0 100644
--- a/openpype/hosts/blender/plugins/create/create_action.py
+++ b/openpype/hosts/blender/plugins/create/create_action.py
@@ -2,7 +2,7 @@
import bpy
-from openpype.pipeline import legacy_io
+from openpype.pipeline import get_current_task_name
import openpype.hosts.blender.api.plugin
from openpype.hosts.blender.api import lib
@@ -22,7 +22,7 @@ class CreateAction(openpype.hosts.blender.api.plugin.Creator):
name = openpype.hosts.blender.api.plugin.asset_name(asset, subset)
collection = bpy.data.collections.new(name=name)
bpy.context.scene.collection.children.link(collection)
- self.data['task'] = legacy_io.Session.get('AVALON_TASK')
+ self.data['task'] = get_current_task_name()
lib.imprint(collection, self.data)
if (self.options or {}).get("useSelection"):
diff --git a/openpype/hosts/blender/plugins/create/create_animation.py b/openpype/hosts/blender/plugins/create/create_animation.py
index a0e9e5e399..bc2840952b 100644
--- a/openpype/hosts/blender/plugins/create/create_animation.py
+++ b/openpype/hosts/blender/plugins/create/create_animation.py
@@ -2,7 +2,7 @@
import bpy
-from openpype.pipeline import legacy_io
+from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
@@ -37,7 +37,7 @@ class CreateAnimation(plugin.Creator):
# asset_group.empty_display_type = 'SINGLE_ARROW'
asset_group = bpy.data.collections.new(name=name)
instances.children.link(asset_group)
- self.data['task'] = legacy_io.Session.get('AVALON_TASK')
+ self.data['task'] = get_current_task_name()
lib.imprint(asset_group, self.data)
if (self.options or {}).get("useSelection"):
diff --git a/openpype/hosts/blender/plugins/create/create_blendScene.py b/openpype/hosts/blender/plugins/create/create_blendScene.py
new file mode 100644
index 0000000000..63bcf212ff
--- /dev/null
+++ b/openpype/hosts/blender/plugins/create/create_blendScene.py
@@ -0,0 +1,51 @@
+"""Create a Blender scene asset."""
+
+import bpy
+
+from openpype.pipeline import get_current_task_name
+from openpype.hosts.blender.api import plugin, lib, ops
+from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
+
+
+class CreateBlendScene(plugin.Creator):
+ """Generic group of assets"""
+
+ name = "blendScene"
+ label = "Blender Scene"
+ family = "blendScene"
+ icon = "cubes"
+
+ def process(self):
+ """ Run the creator on Blender main thread"""
+ mti = ops.MainThreadItem(self._process)
+ ops.execute_in_main_thread(mti)
+
+ def _process(self):
+ # Get Instance Container or create it if it does not exist
+ instances = bpy.data.collections.get(AVALON_INSTANCES)
+ if not instances:
+ instances = bpy.data.collections.new(name=AVALON_INSTANCES)
+ bpy.context.scene.collection.children.link(instances)
+
+ # Create instance object
+ asset = self.data["asset"]
+ subset = self.data["subset"]
+ name = plugin.asset_name(asset, subset)
+ asset_group = bpy.data.objects.new(name=name, object_data=None)
+ asset_group.empty_display_type = 'SINGLE_ARROW'
+ instances.objects.link(asset_group)
+ self.data['task'] = get_current_task_name()
+ lib.imprint(asset_group, self.data)
+
+ # Add selected objects to instance
+ if (self.options or {}).get("useSelection"):
+ bpy.context.view_layer.objects.active = asset_group
+ selected = lib.get_selection()
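+ # Deselect objects whose parent is also selected, so parent_set below
+ # re-parents only the top-level objects to the asset group.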
+ for obj in selected:
+ if obj.parent in selected:
+ obj.select_set(False)
+ continue
+ selected.append(asset_group)
+ bpy.ops.object.parent_set(keep_transform=True)
+
+ return asset_group
diff --git a/openpype/hosts/blender/plugins/create/create_camera.py b/openpype/hosts/blender/plugins/create/create_camera.py
index ada512d7ac..7a770a3e77 100644
--- a/openpype/hosts/blender/plugins/create/create_camera.py
+++ b/openpype/hosts/blender/plugins/create/create_camera.py
@@ -2,7 +2,7 @@
import bpy
-from openpype.pipeline import legacy_io
+from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
@@ -35,7 +35,7 @@ class CreateCamera(plugin.Creator):
asset_group = bpy.data.objects.new(name=name, object_data=None)
asset_group.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(asset_group)
- self.data['task'] = legacy_io.Session.get('AVALON_TASK')
+ self.data['task'] = get_current_task_name()
print(f"self.data: {self.data}")
lib.imprint(asset_group, self.data)
@@ -43,7 +43,9 @@ class CreateCamera(plugin.Creator):
bpy.context.view_layer.objects.active = asset_group
selected = lib.get_selection()
for obj in selected:
- obj.select_set(True)
+ if obj.parent in selected:
+ obj.select_set(False)
+ continue
selected.append(asset_group)
bpy.ops.object.parent_set(keep_transform=True)
else:
diff --git a/openpype/hosts/blender/plugins/create/create_layout.py b/openpype/hosts/blender/plugins/create/create_layout.py
index 5949a4b86e..73ed683256 100644
--- a/openpype/hosts/blender/plugins/create/create_layout.py
+++ b/openpype/hosts/blender/plugins/create/create_layout.py
@@ -2,7 +2,7 @@
import bpy
-from openpype.pipeline import legacy_io
+from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
@@ -34,7 +34,7 @@ class CreateLayout(plugin.Creator):
asset_group = bpy.data.objects.new(name=name, object_data=None)
asset_group.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(asset_group)
- self.data['task'] = legacy_io.Session.get('AVALON_TASK')
+ self.data['task'] = get_current_task_name()
lib.imprint(asset_group, self.data)
# Add selected objects to instance
@@ -42,7 +42,9 @@ class CreateLayout(plugin.Creator):
bpy.context.view_layer.objects.active = asset_group
selected = lib.get_selection()
for obj in selected:
- obj.select_set(True)
+ if obj.parent in selected:
+ obj.select_set(False)
+ continue
selected.append(asset_group)
bpy.ops.object.parent_set(keep_transform=True)
diff --git a/openpype/hosts/blender/plugins/create/create_model.py b/openpype/hosts/blender/plugins/create/create_model.py
index fedc708943..51fc6683f6 100644
--- a/openpype/hosts/blender/plugins/create/create_model.py
+++ b/openpype/hosts/blender/plugins/create/create_model.py
@@ -2,7 +2,7 @@
import bpy
-from openpype.pipeline import legacy_io
+from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
@@ -34,7 +34,7 @@ class CreateModel(plugin.Creator):
asset_group = bpy.data.objects.new(name=name, object_data=None)
asset_group.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(asset_group)
- self.data['task'] = legacy_io.Session.get('AVALON_TASK')
+ self.data['task'] = get_current_task_name()
lib.imprint(asset_group, self.data)
# Add selected objects to instance
@@ -42,7 +42,9 @@ class CreateModel(plugin.Creator):
bpy.context.view_layer.objects.active = asset_group
selected = lib.get_selection()
for obj in selected:
- obj.select_set(True)
+ if obj.parent in selected:
+ obj.select_set(False)
+ continue
selected.append(asset_group)
bpy.ops.object.parent_set(keep_transform=True)
diff --git a/openpype/hosts/blender/plugins/create/create_pointcache.py b/openpype/hosts/blender/plugins/create/create_pointcache.py
index 38707fd3b1..6220f68dc5 100644
--- a/openpype/hosts/blender/plugins/create/create_pointcache.py
+++ b/openpype/hosts/blender/plugins/create/create_pointcache.py
@@ -2,7 +2,7 @@
import bpy
-from openpype.pipeline import legacy_io
+from openpype.pipeline import get_current_task_name
import openpype.hosts.blender.api.plugin
from openpype.hosts.blender.api import lib
@@ -22,7 +22,7 @@ class CreatePointcache(openpype.hosts.blender.api.plugin.Creator):
name = openpype.hosts.blender.api.plugin.asset_name(asset, subset)
collection = bpy.data.collections.new(name=name)
bpy.context.scene.collection.children.link(collection)
- self.data['task'] = legacy_io.Session.get('AVALON_TASK')
+ self.data['task'] = get_current_task_name()
lib.imprint(collection, self.data)
if (self.options or {}).get("useSelection"):
diff --git a/openpype/hosts/blender/plugins/create/create_render.py b/openpype/hosts/blender/plugins/create/create_render.py
new file mode 100644
index 0000000000..f938a21808
--- /dev/null
+++ b/openpype/hosts/blender/plugins/create/create_render.py
@@ -0,0 +1,53 @@
+"""Create render."""
+import bpy
+
+from openpype.pipeline import get_current_task_name
+from openpype.hosts.blender.api import plugin, lib
+from openpype.hosts.blender.api.render_lib import prepare_rendering
+from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
+
+
+class CreateRenderlayer(plugin.Creator):
+ """Single baked camera"""
+
+ name = "renderingMain"
+ label = "Render"
+ family = "render"
+ icon = "eye"
+
+ def process(self):
+ # Get Instance Container or create it if it does not exist
+ instances = bpy.data.collections.get(AVALON_INSTANCES)
+ if not instances:
+ instances = bpy.data.collections.new(name=AVALON_INSTANCES)
+ bpy.context.scene.collection.children.link(instances)
+
+ # Create instance object
+ asset = self.data["asset"]
+ subset = self.data["subset"]
+ name = plugin.asset_name(asset, subset)
+ asset_group = bpy.data.collections.new(name=name)
+
+ try:
+ instances.children.link(asset_group)
+ self.data['task'] = get_current_task_name()
+ lib.imprint(asset_group, self.data)
+
+ prepare_rendering(asset_group)
+ except Exception:
+ # Remove the instance if there was an error
+ bpy.data.collections.remove(asset_group)
+ raise
+
+ # TODO: this is undesirable, but it's the only way to be sure that
+ # the file is saved before the render starts.
+ # Blender, by design, doesn't mark the file as dirty if modifications
+ # happen via script. So, when creating the instance and setting the
+ # render settings, the file is not marked as dirty. This means there
+ # is a risk of sending a file to Deadline without the right settings.
+ # Even the validator that checks that the file is saved would report
+ # the file as saved, even if it isn't. The only solution for now is
+ # to force the file to be saved.
+ bpy.ops.wm.save_as_mainfile(filepath=bpy.data.filepath)
+
+ return asset_group
diff --git a/openpype/hosts/blender/plugins/create/create_review.py b/openpype/hosts/blender/plugins/create/create_review.py
index bf4ea6a7cd..914f249891 100644
--- a/openpype/hosts/blender/plugins/create/create_review.py
+++ b/openpype/hosts/blender/plugins/create/create_review.py
@@ -2,7 +2,7 @@
import bpy
-from openpype.pipeline import legacy_io
+from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
@@ -33,7 +33,7 @@ class CreateReview(plugin.Creator):
name = plugin.asset_name(asset, subset)
asset_group = bpy.data.collections.new(name=name)
instances.children.link(asset_group)
- self.data['task'] = legacy_io.Session.get('AVALON_TASK')
+ self.data['task'] = get_current_task_name()
lib.imprint(asset_group, self.data)
if (self.options or {}).get("useSelection"):
diff --git a/openpype/hosts/blender/plugins/create/create_rig.py b/openpype/hosts/blender/plugins/create/create_rig.py
index 0abd306c6b..08cc46ee3e 100644
--- a/openpype/hosts/blender/plugins/create/create_rig.py
+++ b/openpype/hosts/blender/plugins/create/create_rig.py
@@ -2,7 +2,7 @@
import bpy
-from openpype.pipeline import legacy_io
+from openpype.pipeline import get_current_task_name
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
@@ -34,7 +34,7 @@ class CreateRig(plugin.Creator):
asset_group = bpy.data.objects.new(name=name, object_data=None)
asset_group.empty_display_type = 'SINGLE_ARROW'
instances.objects.link(asset_group)
- self.data['task'] = legacy_io.Session.get('AVALON_TASK')
+ self.data['task'] = get_current_task_name()
lib.imprint(asset_group, self.data)
# Add selected objects to instance
@@ -42,7 +42,9 @@ class CreateRig(plugin.Creator):
bpy.context.view_layer.objects.active = asset_group
selected = lib.get_selection()
for obj in selected:
- obj.select_set(True)
+ if obj.parent in selected:
+ obj.select_set(False)
+ continue
selected.append(asset_group)
bpy.ops.object.parent_set(keep_transform=True)
diff --git a/openpype/hosts/blender/plugins/load/import_workfile.py b/openpype/hosts/blender/plugins/load/import_workfile.py
index bbdf1c7ea0..4f5016d422 100644
--- a/openpype/hosts/blender/plugins/load/import_workfile.py
+++ b/openpype/hosts/blender/plugins/load/import_workfile.py
@@ -52,7 +52,8 @@ class AppendBlendLoader(plugin.AssetLoader):
color = "#775555"
def load(self, context, name=None, namespace=None, data=None):
- append_workfile(context, self.fname, False)
+ path = self.filepath_from_context(context)
+ append_workfile(context, path, False)
# We do not containerize imported content; it remains unmanaged
return
@@ -76,7 +77,8 @@ class ImportBlendLoader(plugin.AssetLoader):
color = "#775555"
def load(self, context, name=None, namespace=None, data=None):
- append_workfile(context, self.fname, True)
+ path = self.filepath_from_context(context)
+ append_workfile(context, path, True)
# We do not containerize imported content; it remains unmanaged
return
diff --git a/openpype/hosts/blender/plugins/load/load_abc.py b/openpype/hosts/blender/plugins/load/load_abc.py
index c1d73eff02..9b3d940536 100644
--- a/openpype/hosts/blender/plugins/load/load_abc.py
+++ b/openpype/hosts/blender/plugins/load/load_abc.py
@@ -26,8 +26,7 @@ class CacheModelLoader(plugin.AssetLoader):
Note:
At least for now it only supports Alembic files.
"""
-
- families = ["model", "pointcache"]
+ families = ["model", "pointcache", "animation"]
representations = ["abc"]
label = "Load Alembic"
@@ -53,16 +52,12 @@ class CacheModelLoader(plugin.AssetLoader):
def _process(self, libpath, asset_group, group_name):
plugin.deselect_all()
- collection = bpy.context.view_layer.active_layer_collection.collection
-
relative = bpy.context.preferences.filepaths.use_relative_paths
bpy.ops.wm.alembic_import(
filepath=libpath,
relative_path=relative
)
- parent = bpy.context.scene.collection
-
imported = lib.get_selection()
# Children must be linked before parents,
@@ -79,6 +74,10 @@ class CacheModelLoader(plugin.AssetLoader):
objects.reverse()
for obj in objects:
+ # Unlink the object from all collections
+ collections = obj.users_collection
+ for collection in collections:
+ collection.objects.unlink(obj)
name = obj.name
obj.name = f"{group_name}:{name}"
if obj.type != 'EMPTY':
@@ -90,7 +89,7 @@ class CacheModelLoader(plugin.AssetLoader):
material_slot.material.name = f"{group_name}:{name_mat}"
if not obj.get(AVALON_PROPERTY):
- obj[AVALON_PROPERTY] = dict()
+ obj[AVALON_PROPERTY] = {}
avalon_info = obj[AVALON_PROPERTY]
avalon_info.update({"container_name": group_name})
@@ -99,6 +98,18 @@ class CacheModelLoader(plugin.AssetLoader):
return objects
+ def _link_objects(self, objects, collection, containers, asset_group):
+ # Link the imported objects to every collection the asset group is
+ # linked to, except the AVALON_CONTAINERS collection
+ group_collections = [
+ collection
+ for collection in asset_group.users_collection
+ if collection != containers]
+
+ for obj in objects:
+ for collection in group_collections:
+ collection.objects.link(obj)
+
def process_asset(
self, context: dict, name: str, namespace: Optional[str] = None,
options: Optional[Dict] = None
@@ -111,7 +122,7 @@ class CacheModelLoader(plugin.AssetLoader):
options: Additional settings dictionary
"""
- libpath = self.fname
+ libpath = self.filepath_from_context(context)
asset = context["asset"]["name"]
subset = context["subset"]["name"]
@@ -120,18 +131,21 @@ class CacheModelLoader(plugin.AssetLoader):
group_name = plugin.asset_name(asset, subset, unique_number)
namespace = namespace or f"{asset}_{unique_number}"
- avalon_containers = bpy.data.collections.get(AVALON_CONTAINERS)
- if not avalon_containers:
- avalon_containers = bpy.data.collections.new(
- name=AVALON_CONTAINERS)
- bpy.context.scene.collection.children.link(avalon_containers)
+ containers = bpy.data.collections.get(AVALON_CONTAINERS)
+ if not containers:
+ containers = bpy.data.collections.new(name=AVALON_CONTAINERS)
+ bpy.context.scene.collection.children.link(containers)
asset_group = bpy.data.objects.new(group_name, object_data=None)
- avalon_containers.objects.link(asset_group)
+ containers.objects.link(asset_group)
objects = self._process(libpath, asset_group, group_name)
- bpy.context.scene.collection.objects.link(asset_group)
+ # Link the asset group to the active collection
+ collection = bpy.context.view_layer.active_layer_collection.collection
+ collection.objects.link(asset_group)
+
+ self._link_objects(objects, asset_group, containers, asset_group)
asset_group[AVALON_PROPERTY] = {
"schema": "openpype:container-2.0",
@@ -207,7 +221,11 @@ class CacheModelLoader(plugin.AssetLoader):
mat = asset_group.matrix_basis.copy()
self._remove(asset_group)
- self._process(str(libpath), asset_group, object_name)
+ objects = self._process(str(libpath), asset_group, object_name)
+
+ containers = bpy.data.collections.get(AVALON_CONTAINERS)
+ self._link_objects(objects, asset_group, containers, asset_group)
+
asset_group.matrix_basis = mat
metadata["libpath"] = str(libpath)
diff --git a/openpype/hosts/blender/plugins/load/load_action.py b/openpype/hosts/blender/plugins/load/load_action.py
index 3c8fe988f0..3447e67ebf 100644
--- a/openpype/hosts/blender/plugins/load/load_action.py
+++ b/openpype/hosts/blender/plugins/load/load_action.py
@@ -43,7 +43,7 @@ class BlendActionLoader(openpype.hosts.blender.api.plugin.AssetLoader):
options: Additional settings dictionary
"""
- libpath = self.fname
+ libpath = self.filepath_from_context(context)
asset = context["asset"]["name"]
subset = context["subset"]["name"]
lib_container = openpype.hosts.blender.api.plugin.asset_name(asset, subset)
diff --git a/openpype/hosts/blender/plugins/load/load_animation.py b/openpype/hosts/blender/plugins/load/load_animation.py
index 6b8d4abd04..3e7f808903 100644
--- a/openpype/hosts/blender/plugins/load/load_animation.py
+++ b/openpype/hosts/blender/plugins/load/load_animation.py
@@ -34,7 +34,7 @@ class BlendAnimationLoader(plugin.AssetLoader):
context: Full parenthood of representation to load
options: Additional settings dictionary
"""
- libpath = self.fname
+ libpath = self.filepath_from_context(context)
with bpy.data.libraries.load(
libpath, link=True, relative=False
diff --git a/openpype/hosts/blender/plugins/load/load_audio.py b/openpype/hosts/blender/plugins/load/load_audio.py
index 3f4fcc17de..ac8f363316 100644
--- a/openpype/hosts/blender/plugins/load/load_audio.py
+++ b/openpype/hosts/blender/plugins/load/load_audio.py
@@ -38,7 +38,7 @@ class AudioLoader(plugin.AssetLoader):
context: Full parenthood of representation to load
options: Additional settings dictionary
"""
- libpath = self.fname
+ libpath = self.filepath_from_context(context)
asset = context["asset"]["name"]
subset = context["subset"]["name"]
diff --git a/openpype/hosts/blender/plugins/load/load_blend.py b/openpype/hosts/blender/plugins/load/load_blend.py
new file mode 100644
index 0000000000..25d6568889
--- /dev/null
+++ b/openpype/hosts/blender/plugins/load/load_blend.py
@@ -0,0 +1,257 @@
+from typing import Dict, List, Optional
+from pathlib import Path
+
+import bpy
+
+from openpype.pipeline import (
+ legacy_create,
+ get_representation_path,
+ AVALON_CONTAINER_ID,
+)
+from openpype.pipeline.create import get_legacy_creator_by_name
+from openpype.hosts.blender.api import plugin
+from openpype.hosts.blender.api.lib import imprint
+from openpype.hosts.blender.api.pipeline import (
+ AVALON_CONTAINERS,
+ AVALON_PROPERTY,
+)
+
+
+class BlendLoader(plugin.AssetLoader):
+ """Load assets from a .blend file."""
+
+ families = ["model", "rig", "layout", "camera", "blendScene"]
+ representations = ["blend"]
+
+ label = "Append Blend"
+ icon = "code-fork"
+ color = "orange"
+
+ @staticmethod
+ def _get_asset_container(objects):
+ empties = [obj for obj in objects if obj.type == 'EMPTY']
+
+ for empty in empties:
+ if empty.get(AVALON_PROPERTY):
+ return empty
+
+ return None
+
+ @staticmethod
+ def get_all_container_parents(asset_group):
+ parent_containers = []
+ parent = asset_group.parent
+ while parent:
+ if parent.get(AVALON_PROPERTY):
+ parent_containers.append(parent)
+ parent = parent.parent
+
+ return parent_containers
+
+ def _post_process_layout(self, container, asset, representation):
+ rigs = [
+ obj for obj in container.children_recursive
+ if (
+ obj.type == 'EMPTY' and
+ obj.get(AVALON_PROPERTY) and
+ obj.get(AVALON_PROPERTY).get('family') == 'rig'
+ )
+ ]
+
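+ # For every rig container found in the layout, create a matching
+ # animation instance so its animation can be published separately.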
+ for rig in rigs:
+ creator_plugin = get_legacy_creator_by_name("CreateAnimation")
+ legacy_create(
+ creator_plugin,
+ name=rig.name.split(':')[-1] + "_animation",
+ asset=asset,
+ options={
+ "useSelection": False,
+ "asset_group": rig
+ },
+ data={
+ "dependencies": representation
+ }
+ )
+
+ def _process_data(self, libpath, group_name):
+ # Append all the data from the .blend file
+ with bpy.data.libraries.load(
+ libpath, link=False, relative=False
+ ) as (data_from, data_to):
+ for attr in dir(data_to):
+ setattr(data_to, attr, getattr(data_from, attr))
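+ # (Assigning all names from `data_from` to `data_to` marks every
+ # data-block for appending; when the context manager exits, `data_to`
+ # holds the appended data-blocks themselves.)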
+
+ members = []
+
+ # Rename each data-block to prefix it with the group name
+ for attr in dir(data_to):
+ for data in getattr(data_to, attr):
+ data.name = f"{group_name}:{data.name}"
+ members.append(data)
+
+ container = self._get_asset_container(data_to.objects)
+ assert container, "No asset group found"
+
+ container.name = group_name
+ container.empty_display_type = 'SINGLE_ARROW'
+
+ # Link the container object to the scene collection
+ bpy.context.scene.collection.objects.link(container)
+
+ # Link all the container's children to the scene collection
+ for obj in container.children_recursive:
+ bpy.context.scene.collection.objects.link(obj)
+
+ # Remove the library from the blend file
+ library = bpy.data.libraries.get(bpy.path.basename(libpath))
+ bpy.data.libraries.remove(library)
+
+ return container, members
+
+ def process_asset(
+ self, context: dict, name: str, namespace: Optional[str] = None,
+ options: Optional[Dict] = None
+ ) -> Optional[List]:
+ """
+ Arguments:
+ name: Use pre-defined name
+ namespace: Use pre-defined namespace
+ context: Full parenthood of representation to load
+ options: Additional settings dictionary
+ """
+ libpath = self.filepath_from_context(context)
+ asset = context["asset"]["name"]
+ subset = context["subset"]["name"]
+
+ try:
+ family = context["representation"]["context"]["family"]
+ except KeyError:
+ family = "model"
+
+ representation = str(context["representation"]["_id"])
+
+ asset_name = plugin.asset_name(asset, subset)
+ unique_number = plugin.get_unique_number(asset, subset)
+ group_name = plugin.asset_name(asset, subset, unique_number)
+ namespace = namespace or f"{asset}_{unique_number}"
+
+ avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
+ if not avalon_container:
+ avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
+ bpy.context.scene.collection.children.link(avalon_container)
+
+ container, members = self._process_data(libpath, group_name)
+
+ if family == "layout":
+ self._post_process_layout(container, asset, representation)
+
+ avalon_container.objects.link(container)
+
+ data = {
+ "schema": "openpype:container-2.0",
+ "id": AVALON_CONTAINER_ID,
+ "name": name,
+ "namespace": namespace or '',
+ "loader": str(self.__class__.__name__),
+ "representation": str(context["representation"]["_id"]),
+ "libpath": libpath,
+ "asset_name": asset_name,
+ "parent": str(context["representation"]["parent"]),
+ "family": context["representation"]["context"]["family"],
+ "objectName": group_name,
+ "members": members,
+ }
+
+ container[AVALON_PROPERTY] = data
+
+ objects = [
+ obj for obj in bpy.data.objects
+ if obj.name.startswith(f"{group_name}:")
+ ]
+
+ self[:] = objects
+ return objects
+
+ def exec_update(self, container: Dict, representation: Dict):
+ """
+ Update the loaded asset.
+ """
+ group_name = container["objectName"]
+ asset_group = bpy.data.objects.get(group_name)
+ libpath = Path(get_representation_path(representation)).as_posix()
+
+ assert asset_group, (
+ f"The asset is not loaded: {container['objectName']}"
+ )
+
+ transform = asset_group.matrix_basis.copy()
+ old_data = dict(asset_group.get(AVALON_PROPERTY))
+ parent = asset_group.parent
+
+ self.exec_remove(container)
+
+ asset_group, members = self._process_data(libpath, group_name)
+
+ avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
+ avalon_container.objects.link(asset_group)
+
+ asset_group.matrix_basis = transform
+ asset_group.parent = parent
+
+ # Restore the old data, but reset members, as they don't exist anymore.
+ # This avoids a crash, because the memory addresses of those members
+ # are no longer valid.
+ old_data["members"] = []
+ asset_group[AVALON_PROPERTY] = old_data
+
+ new_data = {
+ "libpath": libpath,
+ "representation": str(representation["_id"]),
+ "parent": str(representation["parent"]),
+ "members": members,
+ }
+
+ imprint(asset_group, new_data)
+
+ # We need to update all the parent container members
+ parent_containers = self.get_all_container_parents(asset_group)
+
+ for parent_container in parent_containers:
+ parent_members = parent_container[AVALON_PROPERTY]["members"]
+ parent_container[AVALON_PROPERTY]["members"] = (
+ parent_members + members)
+
+ def exec_remove(self, container: Dict) -> bool:
+ """
+ Remove an existing container from a Blender scene.
+ """
+ group_name = container["objectName"]
+ asset_group = bpy.data.objects.get(group_name)
+
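+ # Collect every data-block collection on bpy.data (objects, meshes,
+ # materials, ...) so that members of any type can be removed below.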
+ attrs = [
+ attr for attr in dir(bpy.data)
+ if isinstance(
+ getattr(bpy.data, attr),
+ bpy.types.bpy_prop_collection
+ )
+ ]
+
+ members = asset_group.get(AVALON_PROPERTY).get("members", [])
+
+ # We need to update all the parent container members
+ parent_containers = self.get_all_container_parents(asset_group)
+
+ for parent in parent_containers:
+ parent.get(AVALON_PROPERTY)["members"] = list(filter(
+ lambda i: i not in members,
+ parent.get(AVALON_PROPERTY).get("members", [])))
+
+ for attr in attrs:
+ for data in getattr(bpy.data, attr):
+ if data in members:
+ # Skip the asset group
+ if data == asset_group:
+ continue
+ getattr(bpy.data, attr).remove(data)
+
+ bpy.data.objects.remove(asset_group)
diff --git a/openpype/hosts/blender/plugins/load/load_camera_abc.py b/openpype/hosts/blender/plugins/load/load_camera_abc.py
index 21b48f409f..05d3fb764d 100644
--- a/openpype/hosts/blender/plugins/load/load_camera_abc.py
+++ b/openpype/hosts/blender/plugins/load/load_camera_abc.py
@@ -81,7 +81,9 @@ class AbcCameraLoader(plugin.AssetLoader):
context: Full parenthood of representation to load
options: Additional settings dictionary
"""
- libpath = self.fname
+
+ libpath = self.filepath_from_context(context)
+
asset = context["asset"]["name"]
subset = context["subset"]["name"]
@@ -98,7 +100,7 @@ class AbcCameraLoader(plugin.AssetLoader):
asset_group = bpy.data.objects.new(group_name, object_data=None)
avalon_container.objects.link(asset_group)
- objects = self._process(libpath, asset_group, group_name)
+ self._process(libpath, asset_group, group_name)
objects = []
nodes = list(asset_group.children)
diff --git a/openpype/hosts/blender/plugins/load/load_camera_blend.py b/openpype/hosts/blender/plugins/load/load_camera_blend.py
deleted file mode 100644
index f00027f0b4..0000000000
--- a/openpype/hosts/blender/plugins/load/load_camera_blend.py
+++ /dev/null
@@ -1,256 +0,0 @@
-"""Load a camera asset in Blender."""
-
-import logging
-from pathlib import Path
-from pprint import pformat
-from typing import Dict, List, Optional
-
-import bpy
-
-from openpype.pipeline import (
- get_representation_path,
- AVALON_CONTAINER_ID,
-)
-from openpype.hosts.blender.api import plugin
-from openpype.hosts.blender.api.pipeline import (
- AVALON_CONTAINERS,
- AVALON_PROPERTY,
-)
-
-logger = logging.getLogger("openpype").getChild(
- "blender").getChild("load_camera")
-
-
-class BlendCameraLoader(plugin.AssetLoader):
- """Load a camera from a .blend file.
-
- Warning:
- Loading the same asset more than once is not properly supported at the
- moment.
- """
-
- families = ["camera"]
- representations = ["blend"]
-
- label = "Link Camera (Blend)"
- icon = "code-fork"
- color = "orange"
-
- def _remove(self, asset_group):
- objects = list(asset_group.children)
-
- for obj in objects:
- if obj.type == 'CAMERA':
- bpy.data.cameras.remove(obj.data)
-
- def _process(self, libpath, asset_group, group_name):
- with bpy.data.libraries.load(
- libpath, link=True, relative=False
- ) as (data_from, data_to):
- data_to.objects = data_from.objects
-
- parent = bpy.context.scene.collection
-
- empties = [obj for obj in data_to.objects if obj.type == 'EMPTY']
-
- container = None
-
- for empty in empties:
- if empty.get(AVALON_PROPERTY):
- container = empty
- break
-
- assert container, "No asset group found"
-
- # Children must be linked before parents,
- # otherwise the hierarchy will break
- objects = []
- nodes = list(container.children)
-
- for obj in nodes:
- obj.parent = asset_group
-
- for obj in nodes:
- objects.append(obj)
- nodes.extend(list(obj.children))
-
- objects.reverse()
-
- for obj in objects:
- parent.objects.link(obj)
-
- for obj in objects:
- local_obj = plugin.prepare_data(obj, group_name)
-
- if local_obj.type != 'EMPTY':
- plugin.prepare_data(local_obj.data, group_name)
-
- if not local_obj.get(AVALON_PROPERTY):
- local_obj[AVALON_PROPERTY] = dict()
-
- avalon_info = local_obj[AVALON_PROPERTY]
- avalon_info.update({"container_name": group_name})
-
- objects.reverse()
-
- bpy.data.orphans_purge(do_local_ids=False)
-
- plugin.deselect_all()
-
- return objects
-
- def process_asset(
- self, context: dict, name: str, namespace: Optional[str] = None,
- options: Optional[Dict] = None
- ) -> Optional[List]:
- """
- Arguments:
- name: Use pre-defined name
- namespace: Use pre-defined namespace
- context: Full parenthood of representation to load
- options: Additional settings dictionary
- """
- libpath = self.fname
- asset = context["asset"]["name"]
- subset = context["subset"]["name"]
-
- asset_name = plugin.asset_name(asset, subset)
- unique_number = plugin.get_unique_number(asset, subset)
- group_name = plugin.asset_name(asset, subset, unique_number)
- namespace = namespace or f"{asset}_{unique_number}"
-
- avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
- if not avalon_container:
- avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
- bpy.context.scene.collection.children.link(avalon_container)
-
- asset_group = bpy.data.objects.new(group_name, object_data=None)
- asset_group.empty_display_type = 'SINGLE_ARROW'
- avalon_container.objects.link(asset_group)
-
- objects = self._process(libpath, asset_group, group_name)
-
- bpy.context.scene.collection.objects.link(asset_group)
-
- asset_group[AVALON_PROPERTY] = {
- "schema": "openpype:container-2.0",
- "id": AVALON_CONTAINER_ID,
- "name": name,
- "namespace": namespace or '',
- "loader": str(self.__class__.__name__),
- "representation": str(context["representation"]["_id"]),
- "libpath": libpath,
- "asset_name": asset_name,
- "parent": str(context["representation"]["parent"]),
- "family": context["representation"]["context"]["family"],
- "objectName": group_name
- }
-
- self[:] = objects
- return objects
-
- def exec_update(self, container: Dict, representation: Dict):
- """Update the loaded asset.
-
- This will remove all children of the asset group, load the new ones
- and add them as children of the group.
- """
- object_name = container["objectName"]
- asset_group = bpy.data.objects.get(object_name)
- libpath = Path(get_representation_path(representation))
- extension = libpath.suffix.lower()
-
- self.log.info(
- "Container: %s\nRepresentation: %s",
- pformat(container, indent=2),
- pformat(representation, indent=2),
- )
-
- assert asset_group, (
- f"The asset is not loaded: {container['objectName']}"
- )
- assert libpath, (
- "No existing library file found for {container['objectName']}"
- )
- assert libpath.is_file(), (
- f"The file doesn't exist: {libpath}"
- )
- assert extension in plugin.VALID_EXTENSIONS, (
- f"Unsupported file: {libpath}"
- )
-
- metadata = asset_group.get(AVALON_PROPERTY)
- group_libpath = metadata["libpath"]
-
- normalized_group_libpath = (
- str(Path(bpy.path.abspath(group_libpath)).resolve())
- )
- normalized_libpath = (
- str(Path(bpy.path.abspath(str(libpath))).resolve())
- )
- self.log.debug(
- "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s",
- normalized_group_libpath,
- normalized_libpath,
- )
- if normalized_group_libpath == normalized_libpath:
- self.log.info("Library already loaded, not updating...")
- return
-
- # Check how many assets use the same library
- count = 0
- for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects:
- if obj.get(AVALON_PROPERTY).get('libpath') == group_libpath:
- count += 1
-
- mat = asset_group.matrix_basis.copy()
-
- self._remove(asset_group)
-
- # If it is the last object to use that library, remove it
- if count == 1:
- library = bpy.data.libraries.get(bpy.path.basename(group_libpath))
- if library:
- bpy.data.libraries.remove(library)
-
- self._process(str(libpath), asset_group, object_name)
-
- asset_group.matrix_basis = mat
-
- metadata["libpath"] = str(libpath)
- metadata["representation"] = str(representation["_id"])
- metadata["parent"] = str(representation["parent"])
-
- def exec_remove(self, container: Dict) -> bool:
- """Remove an existing container from a Blender scene.
-
- Arguments:
- container (openpype:container-1.0): Container to remove,
- from `host.ls()`.
-
- Returns:
- bool: Whether the container was deleted.
- """
- object_name = container["objectName"]
- asset_group = bpy.data.objects.get(object_name)
- libpath = asset_group.get(AVALON_PROPERTY).get('libpath')
-
- # Check how many assets use the same library
- count = 0
- for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects:
- if obj.get(AVALON_PROPERTY).get('libpath') == libpath:
- count += 1
-
- if not asset_group:
- return False
-
- self._remove(asset_group)
-
- bpy.data.objects.remove(asset_group)
-
- # If it is the last object to use that library, remove it
- if count == 1:
- library = bpy.data.libraries.get(bpy.path.basename(libpath))
- bpy.data.libraries.remove(library)
-
- return True
diff --git a/openpype/hosts/blender/plugins/load/load_camera_fbx.py b/openpype/hosts/blender/plugins/load/load_camera_fbx.py
index 97f844e610..3cca6e7fd3 100644
--- a/openpype/hosts/blender/plugins/load/load_camera_fbx.py
+++ b/openpype/hosts/blender/plugins/load/load_camera_fbx.py
@@ -86,7 +86,7 @@ class FbxCameraLoader(plugin.AssetLoader):
context: Full parenthood of representation to load
options: Additional settings dictionary
"""
- libpath = self.fname
+ libpath = self.filepath_from_context(context)
asset = context["asset"]["name"]
subset = context["subset"]["name"]
@@ -103,7 +103,7 @@ class FbxCameraLoader(plugin.AssetLoader):
asset_group = bpy.data.objects.new(group_name, object_data=None)
avalon_container.objects.link(asset_group)
- objects = self._process(libpath, asset_group, group_name)
+ self._process(libpath, asset_group, group_name)
objects = []
nodes = list(asset_group.children)
diff --git a/openpype/hosts/blender/plugins/load/load_fbx.py b/openpype/hosts/blender/plugins/load/load_fbx.py
index ee2e7d175c..e129ea6754 100644
--- a/openpype/hosts/blender/plugins/load/load_fbx.py
+++ b/openpype/hosts/blender/plugins/load/load_fbx.py
@@ -130,7 +130,7 @@ class FbxModelLoader(plugin.AssetLoader):
context: Full parenthood of representation to load
options: Additional settings dictionary
"""
- libpath = self.fname
+ libpath = self.filepath_from_context(context)
asset = context["asset"]["name"]
subset = context["subset"]["name"]
diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py
deleted file mode 100644
index 7d2fd23444..0000000000
--- a/openpype/hosts/blender/plugins/load/load_layout_blend.py
+++ /dev/null
@@ -1,469 +0,0 @@
-"""Load a layout in Blender."""
-
-from pathlib import Path
-from pprint import pformat
-from typing import Dict, List, Optional
-
-import bpy
-
-from openpype.pipeline import (
- legacy_create,
- get_representation_path,
- AVALON_CONTAINER_ID,
-)
-from openpype.pipeline.create import get_legacy_creator_by_name
-from openpype.hosts.blender.api import plugin
-from openpype.hosts.blender.api.pipeline import (
- AVALON_CONTAINERS,
- AVALON_PROPERTY,
-)
-
-
-class BlendLayoutLoader(plugin.AssetLoader):
- """Load layout from a .blend file."""
-
- families = ["layout"]
- representations = ["blend"]
-
- label = "Link Layout"
- icon = "code-fork"
- color = "orange"
-
- def _remove(self, asset_group):
- objects = list(asset_group.children)
-
- for obj in objects:
- if obj.type == 'MESH':
- for material_slot in list(obj.material_slots):
- if material_slot.material:
- bpy.data.materials.remove(material_slot.material)
- bpy.data.meshes.remove(obj.data)
- elif obj.type == 'ARMATURE':
- objects.extend(obj.children)
- bpy.data.armatures.remove(obj.data)
- elif obj.type == 'CURVE':
- bpy.data.curves.remove(obj.data)
- elif obj.type == 'EMPTY':
- objects.extend(obj.children)
- bpy.data.objects.remove(obj)
-
- def _remove_asset_and_library(self, asset_group):
- if not asset_group.get(AVALON_PROPERTY):
- return
-
- libpath = asset_group.get(AVALON_PROPERTY).get('libpath')
-
- if not libpath:
- return
-
- # Check how many assets use the same library
- count = 0
- for obj in bpy.data.collections.get(AVALON_CONTAINERS).all_objects:
- if obj.get(AVALON_PROPERTY).get('libpath') == libpath:
- count += 1
-
- self._remove(asset_group)
-
- bpy.data.objects.remove(asset_group)
-
- # If it is the last object to use that library, remove it
- if count == 1:
- library = bpy.data.libraries.get(bpy.path.basename(libpath))
- if library:
- bpy.data.libraries.remove(library)
-
- def _process(
- self, libpath, asset_group, group_name, asset, representation,
- actions, anim_instances
- ):
- with bpy.data.libraries.load(
- libpath, link=True, relative=False
- ) as (data_from, data_to):
- data_to.objects = data_from.objects
-
- parent = bpy.context.scene.collection
-
- empties = [obj for obj in data_to.objects if obj.type == 'EMPTY']
-
- container = None
-
- for empty in empties:
- if (empty.get(AVALON_PROPERTY) and
- empty.get(AVALON_PROPERTY).get('family') == 'layout'):
- container = empty
- break
-
- assert container, "No asset group found"
-
- # Children must be linked before parents,
- # otherwise the hierarchy will break
- objects = []
- nodes = list(container.children)
-
- allowed_types = ['ARMATURE', 'MESH', 'EMPTY']
-
- for obj in nodes:
- if obj.type in allowed_types:
- obj.parent = asset_group
-
- for obj in nodes:
- if obj.type in allowed_types:
- objects.append(obj)
- nodes.extend(list(obj.children))
-
- objects.reverse()
-
- constraints = []
-
- armatures = [obj for obj in objects if obj.type == 'ARMATURE']
-
- for armature in armatures:
- for bone in armature.pose.bones:
- for constraint in bone.constraints:
- if hasattr(constraint, 'target'):
- constraints.append(constraint)
-
- for obj in objects:
- parent.objects.link(obj)
-
- for obj in objects:
- local_obj = plugin.prepare_data(obj)
-
- action = None
-
- if actions:
- action = actions.get(local_obj.name, None)
-
- if local_obj.type == 'MESH':
- plugin.prepare_data(local_obj.data)
-
- if obj != local_obj:
- for constraint in constraints:
- if constraint.target == obj:
- constraint.target = local_obj
-
- for material_slot in local_obj.material_slots:
- if material_slot.material:
- plugin.prepare_data(material_slot.material)
- elif local_obj.type == 'ARMATURE':
- plugin.prepare_data(local_obj.data)
-
- if action:
- if local_obj.animation_data is None:
- local_obj.animation_data_create()
- local_obj.animation_data.action = action
- elif (local_obj.animation_data and
- local_obj.animation_data.action):
- plugin.prepare_data(
- local_obj.animation_data.action)
-
- # Link the drivers to the local object
- if local_obj.data.animation_data:
- for d in local_obj.data.animation_data.drivers:
- for v in d.driver.variables:
- for t in v.targets:
- t.id = local_obj
-
- elif local_obj.type == 'EMPTY':
- if (not anim_instances or
- (anim_instances and
- local_obj.name not in anim_instances.keys())):
- avalon = local_obj.get(AVALON_PROPERTY)
- if avalon and avalon.get('family') == 'rig':
- creator_plugin = get_legacy_creator_by_name(
- "CreateAnimation")
- if not creator_plugin:
- raise ValueError(
- "Creator plugin \"CreateAnimation\" was "
- "not found.")
-
- legacy_create(
- creator_plugin,
- name=local_obj.name.split(':')[-1] + "_animation",
- asset=asset,
- options={"useSelection": False,
- "asset_group": local_obj},
- data={"dependencies": representation}
- )
-
- if not local_obj.get(AVALON_PROPERTY):
- local_obj[AVALON_PROPERTY] = dict()
-
- avalon_info = local_obj[AVALON_PROPERTY]
- avalon_info.update({"container_name": group_name})
-
- objects.reverse()
-
- armatures = [
- obj for obj in bpy.data.objects
- if obj.type == 'ARMATURE' and obj.library is None]
- arm_act = {}
-
- # The armatures with an animation need to be at the center of the
- # scene to be hooked correctly by the curves modifiers.
- for armature in armatures:
- if armature.animation_data and armature.animation_data.action:
- arm_act[armature] = armature.animation_data.action
- armature.animation_data.action = None
- armature.location = (0.0, 0.0, 0.0)
- for bone in armature.pose.bones:
- bone.location = (0.0, 0.0, 0.0)
- bone.rotation_euler = (0.0, 0.0, 0.0)
-
- curves = [obj for obj in data_to.objects if obj.type == 'CURVE']
-
- for curve in curves:
- curve_name = curve.name.split(':')[0]
- curve_obj = bpy.data.objects.get(curve_name)
-
- local_obj = plugin.prepare_data(curve)
- plugin.prepare_data(local_obj.data)
-
- # Curves need to reset the hook, but to do that they need to be
- # in the view layer.
- parent.objects.link(local_obj)
- plugin.deselect_all()
- local_obj.select_set(True)
- bpy.context.view_layer.objects.active = local_obj
- if local_obj.library is None:
- bpy.ops.object.mode_set(mode='EDIT')
- bpy.ops.object.hook_reset()
- bpy.ops.object.mode_set(mode='OBJECT')
- parent.objects.unlink(local_obj)
-
- local_obj.use_fake_user = True
-
- for mod in local_obj.modifiers:
- mod.object = bpy.data.objects.get(f"{mod.object.name}")
-
- if not local_obj.get(AVALON_PROPERTY):
- local_obj[AVALON_PROPERTY] = dict()
-
- avalon_info = local_obj[AVALON_PROPERTY]
- avalon_info.update({"container_name": group_name})
-
- local_obj.parent = curve_obj
- objects.append(local_obj)
-
- for armature in armatures:
- if arm_act.get(armature):
- armature.animation_data.action = arm_act[armature]
-
- while bpy.data.orphans_purge(do_local_ids=False):
- pass
-
- plugin.deselect_all()
-
- return objects
-
- def process_asset(
- self, context: dict, name: str, namespace: Optional[str] = None,
- options: Optional[Dict] = None
- ) -> Optional[List]:
- """
- Arguments:
- name: Use pre-defined name
- namespace: Use pre-defined namespace
- context: Full parenthood of representation to load
- options: Additional settings dictionary
- """
- libpath = self.fname
- asset = context["asset"]["name"]
- subset = context["subset"]["name"]
- representation = str(context["representation"]["_id"])
-
- asset_name = plugin.asset_name(asset, subset)
- unique_number = plugin.get_unique_number(asset, subset)
- group_name = plugin.asset_name(asset, subset, unique_number)
- namespace = namespace or f"{asset}_{unique_number}"
-
- avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
- if not avalon_container:
- avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
- bpy.context.scene.collection.children.link(avalon_container)
-
- asset_group = bpy.data.objects.new(group_name, object_data=None)
- asset_group.empty_display_type = 'SINGLE_ARROW'
- avalon_container.objects.link(asset_group)
-
- objects = self._process(
- libpath, asset_group, group_name, asset, representation,
- None, None)
-
- for child in asset_group.children:
- if child.get(AVALON_PROPERTY):
- avalon_container.objects.link(child)
-
- bpy.context.scene.collection.objects.link(asset_group)
-
- asset_group[AVALON_PROPERTY] = {
- "schema": "openpype:container-2.0",
- "id": AVALON_CONTAINER_ID,
- "name": name,
- "namespace": namespace or '',
- "loader": str(self.__class__.__name__),
- "representation": str(context["representation"]["_id"]),
- "libpath": libpath,
- "asset_name": asset_name,
- "parent": str(context["representation"]["parent"]),
- "family": context["representation"]["context"]["family"],
- "objectName": group_name
- }
-
- self[:] = objects
- return objects
-
- def update(self, container: Dict, representation: Dict):
- """Update the loaded asset.
-
- This will remove all objects of the current collection, load the new
- ones and add them to the collection.
- If the objects of the collection are used in another collection they
- will not be removed, only unlinked. Normally this should not be the
- case though.
-
- Warning:
- No nested collections are supported at the moment!
- """
- object_name = container["objectName"]
- asset_group = bpy.data.objects.get(object_name)
- libpath = Path(get_representation_path(representation))
- extension = libpath.suffix.lower()
-
- self.log.info(
- "Container: %s\nRepresentation: %s",
- pformat(container, indent=2),
- pformat(representation, indent=2),
- )
-
- assert asset_group, (
- f"The asset is not loaded: {container['objectName']}"
- )
- assert libpath, (
- "No existing library file found for {container['objectName']}"
- )
- assert libpath.is_file(), (
- f"The file doesn't exist: {libpath}"
- )
- assert extension in plugin.VALID_EXTENSIONS, (
- f"Unsupported file: {libpath}"
- )
-
- metadata = asset_group.get(AVALON_PROPERTY)
- group_libpath = metadata["libpath"]
-
- normalized_group_libpath = (
- str(Path(bpy.path.abspath(group_libpath)).resolve())
- )
- normalized_libpath = (
- str(Path(bpy.path.abspath(str(libpath))).resolve())
- )
- self.log.debug(
- "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s",
- normalized_group_libpath,
- normalized_libpath,
- )
- if normalized_group_libpath == normalized_libpath:
- self.log.info("Library already loaded, not updating...")
- return
-
- actions = {}
- anim_instances = {}
-
- for obj in asset_group.children:
- obj_meta = obj.get(AVALON_PROPERTY)
- if obj_meta.get('family') == 'rig':
- # Get animation instance
- collections = list(obj.users_collection)
- for c in collections:
- avalon = c.get(AVALON_PROPERTY)
- if avalon and avalon.get('family') == 'animation':
- anim_instances[obj.name] = c.name
- break
-
- # Get armature's action
- rig = None
- for child in obj.children:
- if child.type == 'ARMATURE':
- rig = child
- break
- if not rig:
- raise Exception("No armature in the rig asset group.")
- if rig.animation_data and rig.animation_data.action:
- instance_name = obj_meta.get('instance_name')
- actions[instance_name] = rig.animation_data.action
-
- mat = asset_group.matrix_basis.copy()
-
- # Remove the children of the asset_group first
- for child in list(asset_group.children):
- self._remove_asset_and_library(child)
-
- # Check how many assets use the same library
- count = 0
- for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects:
- if obj.get(AVALON_PROPERTY).get('libpath') == group_libpath:
- count += 1
-
- self._remove(asset_group)
-
- # If it is the last object to use that library, remove it
- if count == 1:
- library = bpy.data.libraries.get(bpy.path.basename(group_libpath))
- if library:
- bpy.data.libraries.remove(library)
-
- asset = container.get("asset_name").split("_")[0]
-
- self._process(
- str(libpath), asset_group, object_name, asset,
- str(representation.get("_id")), actions, anim_instances
- )
-
- # Link the new objects to the animation collection
- for inst in anim_instances.keys():
- try:
- obj = bpy.data.objects[inst]
- bpy.data.collections[anim_instances[inst]].objects.link(obj)
- except KeyError:
- self.log.info(f"Object {inst} does not exist anymore.")
- coll = bpy.data.collections.get(anim_instances[inst])
- if (coll):
- bpy.data.collections.remove(coll)
-
- avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
- for child in asset_group.children:
- if child.get(AVALON_PROPERTY):
- avalon_container.objects.link(child)
-
- asset_group.matrix_basis = mat
-
- metadata["libpath"] = str(libpath)
- metadata["representation"] = str(representation["_id"])
-
- def exec_remove(self, container: Dict) -> bool:
- """Remove an existing container from a Blender scene.
-
- Arguments:
- container (openpype:container-1.0): Container to remove,
- from `host.ls()`.
-
- Returns:
- bool: Whether the container was deleted.
-
- Warning:
- No nested collections are supported at the moment!
- """
- object_name = container["objectName"]
- asset_group = bpy.data.objects.get(object_name)
-
- if not asset_group:
- return False
-
- # Remove the children of the asset_group first
- for child in list(asset_group.children):
- self._remove_asset_and_library(child)
-
- self._remove_asset_and_library(asset_group)
-
- return True
diff --git a/openpype/hosts/blender/plugins/load/load_layout_json.py b/openpype/hosts/blender/plugins/load/load_layout_json.py
index eca098627e..81683b8de8 100644
--- a/openpype/hosts/blender/plugins/load/load_layout_json.py
+++ b/openpype/hosts/blender/plugins/load/load_layout_json.py
@@ -144,7 +144,7 @@ class JsonLayoutLoader(plugin.AssetLoader):
context: Full parenthood of representation to load
options: Additional settings dictionary
"""
- libpath = self.fname
+ libpath = self.filepath_from_context(context)
asset = context["asset"]["name"]
subset = context["subset"]["name"]
diff --git a/openpype/hosts/blender/plugins/load/load_look.py b/openpype/hosts/blender/plugins/load/load_look.py
index 70d1b95f02..c121f55633 100644
--- a/openpype/hosts/blender/plugins/load/load_look.py
+++ b/openpype/hosts/blender/plugins/load/load_look.py
@@ -92,7 +92,7 @@ class BlendLookLoader(plugin.AssetLoader):
options: Additional settings dictionary
"""
- libpath = self.fname
+ libpath = self.filepath_from_context(context)
asset = context["asset"]["name"]
subset = context["subset"]["name"]
diff --git a/openpype/hosts/blender/plugins/load/load_model.py b/openpype/hosts/blender/plugins/load/load_model.py
deleted file mode 100644
index 0a5d98ffa0..0000000000
--- a/openpype/hosts/blender/plugins/load/load_model.py
+++ /dev/null
@@ -1,296 +0,0 @@
-"""Load a model asset in Blender."""
-
-from pathlib import Path
-from pprint import pformat
-from typing import Dict, List, Optional
-
-import bpy
-
-from openpype.pipeline import (
- get_representation_path,
- AVALON_CONTAINER_ID,
-)
-from openpype.hosts.blender.api import plugin
-from openpype.hosts.blender.api.pipeline import (
- AVALON_CONTAINERS,
- AVALON_PROPERTY,
-)
-
-
-class BlendModelLoader(plugin.AssetLoader):
- """Load models from a .blend file.
-
- Because they come from a .blend file we can simply link the collection that
- contains the model. There is no further need to 'containerise' it.
- """
-
- families = ["model"]
- representations = ["blend"]
-
- label = "Link Model"
- icon = "code-fork"
- color = "orange"
-
- def _remove(self, asset_group):
- objects = list(asset_group.children)
-
- for obj in objects:
- if obj.type == 'MESH':
- for material_slot in list(obj.material_slots):
- bpy.data.materials.remove(material_slot.material)
- bpy.data.meshes.remove(obj.data)
- elif obj.type == 'EMPTY':
- objects.extend(obj.children)
- bpy.data.objects.remove(obj)
-
- def _process(self, libpath, asset_group, group_name):
- with bpy.data.libraries.load(
- libpath, link=True, relative=False
- ) as (data_from, data_to):
- data_to.objects = data_from.objects
-
- parent = bpy.context.scene.collection
-
- empties = [obj for obj in data_to.objects if obj.type == 'EMPTY']
-
- container = None
-
- for empty in empties:
- if empty.get(AVALON_PROPERTY):
- container = empty
- break
-
- assert container, "No asset group found"
-
- # Children must be linked before parents,
- # otherwise the hierarchy will break
- objects = []
- nodes = list(container.children)
-
- for obj in nodes:
- obj.parent = asset_group
-
- for obj in nodes:
- objects.append(obj)
- nodes.extend(list(obj.children))
-
- objects.reverse()
-
- for obj in objects:
- parent.objects.link(obj)
-
- for obj in objects:
- local_obj = plugin.prepare_data(obj, group_name)
- if local_obj.type != 'EMPTY':
- plugin.prepare_data(local_obj.data, group_name)
-
- for material_slot in local_obj.material_slots:
- if material_slot.material:
- plugin.prepare_data(material_slot.material, group_name)
-
- if not local_obj.get(AVALON_PROPERTY):
- local_obj[AVALON_PROPERTY] = dict()
-
- avalon_info = local_obj[AVALON_PROPERTY]
- avalon_info.update({"container_name": group_name})
-
- objects.reverse()
-
- bpy.data.orphans_purge(do_local_ids=False)
-
- plugin.deselect_all()
-
- return objects
-
- def process_asset(
- self, context: dict, name: str, namespace: Optional[str] = None,
- options: Optional[Dict] = None
- ) -> Optional[List]:
- """
- Arguments:
- name: Use pre-defined name
- namespace: Use pre-defined namespace
- context: Full parenthood of representation to load
- options: Additional settings dictionary
- """
- libpath = self.fname
- asset = context["asset"]["name"]
- subset = context["subset"]["name"]
-
- asset_name = plugin.asset_name(asset, subset)
- unique_number = plugin.get_unique_number(asset, subset)
- group_name = plugin.asset_name(asset, subset, unique_number)
- namespace = namespace or f"{asset}_{unique_number}"
-
- avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
- if not avalon_container:
- avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
- bpy.context.scene.collection.children.link(avalon_container)
-
- asset_group = bpy.data.objects.new(group_name, object_data=None)
- asset_group.empty_display_type = 'SINGLE_ARROW'
- avalon_container.objects.link(asset_group)
-
- plugin.deselect_all()
-
- if options is not None:
- parent = options.get('parent')
- transform = options.get('transform')
-
- if parent and transform:
- location = transform.get('translation')
- rotation = transform.get('rotation')
- scale = transform.get('scale')
-
- asset_group.location = (
- location.get('x'),
- location.get('y'),
- location.get('z')
- )
- asset_group.rotation_euler = (
- rotation.get('x'),
- rotation.get('y'),
- rotation.get('z')
- )
- asset_group.scale = (
- scale.get('x'),
- scale.get('y'),
- scale.get('z')
- )
-
- bpy.context.view_layer.objects.active = parent
- asset_group.select_set(True)
-
- bpy.ops.object.parent_set(keep_transform=True)
-
- plugin.deselect_all()
-
- objects = self._process(libpath, asset_group, group_name)
-
- bpy.context.scene.collection.objects.link(asset_group)
-
- asset_group[AVALON_PROPERTY] = {
- "schema": "openpype:container-2.0",
- "id": AVALON_CONTAINER_ID,
- "name": name,
- "namespace": namespace or '',
- "loader": str(self.__class__.__name__),
- "representation": str(context["representation"]["_id"]),
- "libpath": libpath,
- "asset_name": asset_name,
- "parent": str(context["representation"]["parent"]),
- "family": context["representation"]["context"]["family"],
- "objectName": group_name
- }
-
- self[:] = objects
- return objects
-
- def exec_update(self, container: Dict, representation: Dict):
- """Update the loaded asset.
-
- This will remove all objects of the current collection, load the new
- ones and add them to the collection.
-        If the objects of the collection are used in another collection,
-        they will not be removed, only unlinked. Normally, though, this
-        should not be the case.
- """
- object_name = container["objectName"]
- asset_group = bpy.data.objects.get(object_name)
- libpath = Path(get_representation_path(representation))
- extension = libpath.suffix.lower()
-
- self.log.info(
- "Container: %s\nRepresentation: %s",
- pformat(container, indent=2),
- pformat(representation, indent=2),
- )
-
- assert asset_group, (
- f"The asset is not loaded: {container['objectName']}"
- )
-        assert libpath, (
-            f"No existing library file found for {container['objectName']}"
-        )
- assert libpath.is_file(), (
- f"The file doesn't exist: {libpath}"
- )
- assert extension in plugin.VALID_EXTENSIONS, (
- f"Unsupported file: {libpath}"
- )
-
- metadata = asset_group.get(AVALON_PROPERTY)
- group_libpath = metadata["libpath"]
-
- normalized_group_libpath = (
- str(Path(bpy.path.abspath(group_libpath)).resolve())
- )
- normalized_libpath = (
- str(Path(bpy.path.abspath(str(libpath))).resolve())
- )
- self.log.debug(
- "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s",
- normalized_group_libpath,
- normalized_libpath,
- )
- if normalized_group_libpath == normalized_libpath:
- self.log.info("Library already loaded, not updating...")
- return
-
- # Check how many assets use the same library
- count = 0
- for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects:
- if obj.get(AVALON_PROPERTY).get('libpath') == group_libpath:
- count += 1
-
- mat = asset_group.matrix_basis.copy()
-
- self._remove(asset_group)
-
- # If it is the last object to use that library, remove it
- if count == 1:
- library = bpy.data.libraries.get(bpy.path.basename(group_libpath))
- if library:
- bpy.data.libraries.remove(library)
-
- self._process(str(libpath), asset_group, object_name)
-
- asset_group.matrix_basis = mat
-
- metadata["libpath"] = str(libpath)
- metadata["representation"] = str(representation["_id"])
- metadata["parent"] = str(representation["parent"])
-
- def exec_remove(self, container: Dict) -> bool:
- """Remove an existing container from a Blender scene.
-
- Arguments:
- container (openpype:container-1.0): Container to remove,
- from `host.ls()`.
-
- Returns:
- bool: Whether the container was deleted.
- """
-        object_name = container["objectName"]
-        asset_group = bpy.data.objects.get(object_name)
-        if not asset_group:
-            return False
-
-        libpath = asset_group.get(AVALON_PROPERTY).get('libpath')
-
-        # Check how many assets use the same library
-        count = 0
-        for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects:
-            if obj.get(AVALON_PROPERTY).get('libpath') == libpath:
-                count += 1
-
- self._remove(asset_group)
-
- bpy.data.objects.remove(asset_group)
-
- # If it is the last object to use that library, remove it
- if count == 1:
- library = bpy.data.libraries.get(bpy.path.basename(libpath))
- bpy.data.libraries.remove(library)
-
- return True
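A pattern shared by this loader deletion (and its rig counterpart below) is
worth noting for any replacement: a linked .blend library is removed only once
the last container that references it is unloaded. A minimal sketch of that
reference counting, using the same constants the loaders import (the helper
names are illustrative):

import bpy

from openpype.hosts.blender.api.pipeline import (
    AVALON_CONTAINERS,
    AVALON_PROPERTY,
)


def count_library_users(libpath):
    """Count loaded containers that reference the same .blend library."""
    avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
    count = 0
    for obj in avalon_container.objects:
        metadata = obj.get(AVALON_PROPERTY) or {}
        if metadata.get("libpath") == libpath:
            count += 1
    return count


def remove_library_if_unused(libpath):
    """Remove the library datablock once its last user is unloaded."""
    if count_library_users(libpath) == 1:
        library = bpy.data.libraries.get(bpy.path.basename(libpath))
        if library:
            bpy.data.libraries.remove(library)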
diff --git a/openpype/hosts/blender/plugins/load/load_rig.py b/openpype/hosts/blender/plugins/load/load_rig.py
deleted file mode 100644
index 1d23a70061..0000000000
--- a/openpype/hosts/blender/plugins/load/load_rig.py
+++ /dev/null
@@ -1,417 +0,0 @@
-"""Load a rig asset in Blender."""
-
-from pathlib import Path
-from pprint import pformat
-from typing import Dict, List, Optional
-
-import bpy
-
-from openpype.pipeline import (
- legacy_create,
- get_representation_path,
- AVALON_CONTAINER_ID,
-)
-from openpype.pipeline.create import get_legacy_creator_by_name
-from openpype.hosts.blender.api import (
- plugin,
- get_selection,
-)
-from openpype.hosts.blender.api.pipeline import (
- AVALON_CONTAINERS,
- AVALON_PROPERTY,
-)
-
-
-class BlendRigLoader(plugin.AssetLoader):
- """Load rigs from a .blend file."""
-
- families = ["rig"]
- representations = ["blend"]
-
- label = "Link Rig"
- icon = "code-fork"
- color = "orange"
-
- def _remove(self, asset_group):
- objects = list(asset_group.children)
-
- for obj in objects:
- if obj.type == 'MESH':
- for material_slot in list(obj.material_slots):
- if material_slot.material:
- bpy.data.materials.remove(material_slot.material)
- bpy.data.meshes.remove(obj.data)
- elif obj.type == 'ARMATURE':
- objects.extend(obj.children)
- bpy.data.armatures.remove(obj.data)
- elif obj.type == 'CURVE':
- bpy.data.curves.remove(obj.data)
- elif obj.type == 'EMPTY':
- objects.extend(obj.children)
- bpy.data.objects.remove(obj)
-
- def _process(self, libpath, asset_group, group_name, action):
- with bpy.data.libraries.load(
- libpath, link=True, relative=False
- ) as (data_from, data_to):
- data_to.objects = data_from.objects
-
- parent = bpy.context.scene.collection
-
- empties = [obj for obj in data_to.objects if obj.type == 'EMPTY']
-
- container = None
-
- for empty in empties:
- if empty.get(AVALON_PROPERTY):
- container = empty
- break
-
- assert container, "No asset group found"
-
- # Children must be linked before parents,
- # otherwise the hierarchy will break
- objects = []
- nodes = list(container.children)
-
- allowed_types = ['ARMATURE', 'MESH']
-
- for obj in nodes:
- if obj.type in allowed_types:
- obj.parent = asset_group
-
- for obj in nodes:
- if obj.type in allowed_types:
- objects.append(obj)
- nodes.extend(list(obj.children))
-
- objects.reverse()
-
- constraints = []
-
- armatures = [obj for obj in objects if obj.type == 'ARMATURE']
-
- for armature in armatures:
- for bone in armature.pose.bones:
- for constraint in bone.constraints:
- if hasattr(constraint, 'target'):
- constraints.append(constraint)
-
- for obj in objects:
- parent.objects.link(obj)
-
- for obj in objects:
- local_obj = plugin.prepare_data(obj, group_name)
-
- if local_obj.type == 'MESH':
- plugin.prepare_data(local_obj.data, group_name)
-
- if obj != local_obj:
- for constraint in constraints:
- if constraint.target == obj:
- constraint.target = local_obj
-
- for material_slot in local_obj.material_slots:
- if material_slot.material:
- plugin.prepare_data(material_slot.material, group_name)
- elif local_obj.type == 'ARMATURE':
- plugin.prepare_data(local_obj.data, group_name)
-
- if action is not None:
- if local_obj.animation_data is None:
- local_obj.animation_data_create()
- local_obj.animation_data.action = action
- elif (local_obj.animation_data and
- local_obj.animation_data.action is not None):
- plugin.prepare_data(
- local_obj.animation_data.action, group_name)
-
-                # Relink the drivers to the local object
- if local_obj.data.animation_data:
- for d in local_obj.data.animation_data.drivers:
- for v in d.driver.variables:
- for t in v.targets:
- t.id = local_obj
-
- if not local_obj.get(AVALON_PROPERTY):
- local_obj[AVALON_PROPERTY] = dict()
-
- avalon_info = local_obj[AVALON_PROPERTY]
- avalon_info.update({"container_name": group_name})
-
- objects.reverse()
-
- curves = [obj for obj in data_to.objects if obj.type == 'CURVE']
-
- for curve in curves:
- local_obj = plugin.prepare_data(curve, group_name)
- plugin.prepare_data(local_obj.data, group_name)
-
- local_obj.use_fake_user = True
-
- for mod in local_obj.modifiers:
- mod_target_name = mod.object.name
- mod.object = bpy.data.objects.get(
- f"{group_name}:{mod_target_name}")
-
- if not local_obj.get(AVALON_PROPERTY):
- local_obj[AVALON_PROPERTY] = dict()
-
- avalon_info = local_obj[AVALON_PROPERTY]
- avalon_info.update({"container_name": group_name})
-
- local_obj.parent = asset_group
- objects.append(local_obj)
-
- while bpy.data.orphans_purge(do_local_ids=False):
- pass
-
- plugin.deselect_all()
-
- return objects
-
- def process_asset(
- self, context: dict, name: str, namespace: Optional[str] = None,
- options: Optional[Dict] = None
- ) -> Optional[List]:
- """
- Arguments:
- name: Use pre-defined name
- namespace: Use pre-defined namespace
- context: Full parenthood of representation to load
- options: Additional settings dictionary
- """
- libpath = self.fname
- asset = context["asset"]["name"]
- subset = context["subset"]["name"]
-
- asset_name = plugin.asset_name(asset, subset)
- unique_number = plugin.get_unique_number(asset, subset)
- group_name = plugin.asset_name(asset, subset, unique_number)
- namespace = namespace or f"{asset}_{unique_number}"
-
- avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
- if not avalon_container:
- avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
- bpy.context.scene.collection.children.link(avalon_container)
-
- asset_group = bpy.data.objects.new(group_name, object_data=None)
- asset_group.empty_display_type = 'SINGLE_ARROW'
- avalon_container.objects.link(asset_group)
-
- action = None
-
- plugin.deselect_all()
-
- create_animation = False
- anim_file = None
-
- if options is not None:
- parent = options.get('parent')
- transform = options.get('transform')
- action = options.get('action')
- create_animation = options.get('create_animation')
- anim_file = options.get('animation_file')
-
- if parent and transform:
- location = transform.get('translation')
- rotation = transform.get('rotation')
- scale = transform.get('scale')
-
- asset_group.location = (
- location.get('x'),
- location.get('y'),
- location.get('z')
- )
- asset_group.rotation_euler = (
- rotation.get('x'),
- rotation.get('y'),
- rotation.get('z')
- )
- asset_group.scale = (
- scale.get('x'),
- scale.get('y'),
- scale.get('z')
- )
-
- bpy.context.view_layer.objects.active = parent
- asset_group.select_set(True)
-
- bpy.ops.object.parent_set(keep_transform=True)
-
- plugin.deselect_all()
-
- objects = self._process(libpath, asset_group, group_name, action)
-
- if create_animation:
- creator_plugin = get_legacy_creator_by_name("CreateAnimation")
- if not creator_plugin:
- raise ValueError("Creator plugin \"CreateAnimation\" was "
- "not found.")
-
- asset_group.select_set(True)
-
- animation_asset = options.get('animation_asset')
-
- legacy_create(
- creator_plugin,
- name=namespace + "_animation",
- # name=f"{unique_number}_{subset}_animation",
- asset=animation_asset,
- options={"useSelection": False, "asset_group": asset_group},
- data={"dependencies": str(context["representation"]["_id"])}
- )
-
- plugin.deselect_all()
-
- if anim_file:
- bpy.ops.import_scene.fbx(filepath=anim_file, anim_offset=0.0)
-
- imported = get_selection()
-
- armature = [
- o for o in asset_group.children if o.type == 'ARMATURE'][0]
-
- imported_group = [
- o for o in imported if o.type == 'EMPTY'][0]
-
- for obj in imported:
- if obj.type == 'ARMATURE':
- if not armature.animation_data:
- armature.animation_data_create()
- armature.animation_data.action = obj.animation_data.action
-
- self._remove(imported_group)
- bpy.data.objects.remove(imported_group)
-
- bpy.context.scene.collection.objects.link(asset_group)
-
- asset_group[AVALON_PROPERTY] = {
- "schema": "openpype:container-2.0",
- "id": AVALON_CONTAINER_ID,
- "name": name,
- "namespace": namespace or '',
- "loader": str(self.__class__.__name__),
- "representation": str(context["representation"]["_id"]),
- "libpath": libpath,
- "asset_name": asset_name,
- "parent": str(context["representation"]["parent"]),
- "family": context["representation"]["context"]["family"],
- "objectName": group_name
- }
-
- self[:] = objects
- return objects
-
- def exec_update(self, container: Dict, representation: Dict):
- """Update the loaded asset.
-
- This will remove all children of the asset group, load the new ones
- and add them as children of the group.
- """
- object_name = container["objectName"]
- asset_group = bpy.data.objects.get(object_name)
- libpath = Path(get_representation_path(representation))
- extension = libpath.suffix.lower()
-
- self.log.info(
- "Container: %s\nRepresentation: %s",
- pformat(container, indent=2),
- pformat(representation, indent=2),
- )
-
- assert asset_group, (
- f"The asset is not loaded: {container['objectName']}"
- )
-        assert libpath, (
-            f"No existing library file found for {container['objectName']}"
-        )
- assert libpath.is_file(), (
- f"The file doesn't exist: {libpath}"
- )
- assert extension in plugin.VALID_EXTENSIONS, (
- f"Unsupported file: {libpath}"
- )
-
- metadata = asset_group.get(AVALON_PROPERTY)
- group_libpath = metadata["libpath"]
-
- normalized_group_libpath = (
- str(Path(bpy.path.abspath(group_libpath)).resolve())
- )
- normalized_libpath = (
- str(Path(bpy.path.abspath(str(libpath))).resolve())
- )
- self.log.debug(
- "normalized_group_libpath:\n %s\nnormalized_libpath:\n %s",
- normalized_group_libpath,
- normalized_libpath,
- )
- if normalized_group_libpath == normalized_libpath:
- self.log.info("Library already loaded, not updating...")
- return
-
- # Check how many assets use the same library
- count = 0
- for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects:
- if obj.get(AVALON_PROPERTY).get('libpath') == group_libpath:
- count += 1
-
- # Get the armature of the rig
- objects = asset_group.children
- armature = [obj for obj in objects if obj.type == 'ARMATURE'][0]
-
- action = None
- if armature.animation_data and armature.animation_data.action:
- action = armature.animation_data.action
-
- mat = asset_group.matrix_basis.copy()
-
- self._remove(asset_group)
-
- # If it is the last object to use that library, remove it
- if count == 1:
- library = bpy.data.libraries.get(bpy.path.basename(group_libpath))
- bpy.data.libraries.remove(library)
-
- self._process(str(libpath), asset_group, object_name, action)
-
- asset_group.matrix_basis = mat
-
- metadata["libpath"] = str(libpath)
- metadata["representation"] = str(representation["_id"])
- metadata["parent"] = str(representation["parent"])
-
- def exec_remove(self, container: Dict) -> bool:
- """Remove an existing asset group from a Blender scene.
-
- Arguments:
- container (openpype:container-1.0): Container to remove,
- from `host.ls()`.
-
- Returns:
- bool: Whether the asset group was deleted.
- """
-        object_name = container["objectName"]
-        asset_group = bpy.data.objects.get(object_name)
-        if not asset_group:
-            return False
-
-        libpath = asset_group.get(AVALON_PROPERTY).get('libpath')
-
-        # Check how many assets use the same library
-        count = 0
-        for obj in bpy.data.collections.get(AVALON_CONTAINERS).objects:
-            if obj.get(AVALON_PROPERTY).get('libpath') == libpath:
-                count += 1
-
- self._remove(asset_group)
-
- bpy.data.objects.remove(asset_group)
-
- # If it is the last object to use that library, remove it
- if count == 1:
- library = bpy.data.libraries.get(bpy.path.basename(libpath))
- bpy.data.libraries.remove(library)
-
- return True
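The rig loader above collects every pose-bone constraint that has a target
before localizing the linked objects, then re-points any constraint that still
targets a linked object to its local copy. A condensed sketch of that
retargeting step, assuming a mapping from linked objects to their localized
copies has been built up front (the mapping is illustrative; the original
compares each constraint target per object instead):

def retarget_bone_constraints(armatures, linked_to_local):
    """Re-point pose-bone constraints from linked objects to local copies."""
    for armature in armatures:
        for bone in armature.pose.bones:
            for constraint in bone.constraints:
                target = getattr(constraint, "target", None)
                if target in linked_to_local:
                    constraint.target = linked_to_local[target]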
diff --git a/openpype/hosts/blender/plugins/publish/collect_current_file.py b/openpype/hosts/blender/plugins/publish/collect_current_file.py
index c3097a0694..c2d8a96a18 100644
--- a/openpype/hosts/blender/plugins/publish/collect_current_file.py
+++ b/openpype/hosts/blender/plugins/publish/collect_current_file.py
@@ -2,7 +2,7 @@ import os
import bpy
import pyblish.api
-from openpype.pipeline import legacy_io
+from openpype.pipeline import get_current_task_name, get_current_asset_name
from openpype.hosts.blender.api import workio
@@ -37,7 +37,7 @@ class CollectBlenderCurrentFile(pyblish.api.ContextPlugin):
folder, file = os.path.split(current_file)
filename, ext = os.path.splitext(file)
- task = legacy_io.Session["AVALON_TASK"]
+ task = get_current_task_name()
data = {}
@@ -47,7 +47,7 @@ class CollectBlenderCurrentFile(pyblish.api.ContextPlugin):
data.update({
"subset": subset,
- "asset": os.getenv("AVALON_ASSET", None),
+ "asset": get_current_asset_name(),
"label": subset,
"publish": True,
"family": "workfile",
diff --git a/openpype/hosts/blender/plugins/publish/collect_render.py b/openpype/hosts/blender/plugins/publish/collect_render.py
new file mode 100644
index 0000000000..92e2473a95
--- /dev/null
+++ b/openpype/hosts/blender/plugins/publish/collect_render.py
@@ -0,0 +1,123 @@
+# -*- coding: utf-8 -*-
+"""Collect render data."""
+
+import os
+import re
+
+import bpy
+
+from openpype.hosts.blender.api import colorspace
+import pyblish.api
+
+
+class CollectBlenderRender(pyblish.api.InstancePlugin):
+ """Gather all publishable render layers from renderSetup."""
+
+ order = pyblish.api.CollectorOrder + 0.01
+ hosts = ["blender"]
+ families = ["render"]
+ label = "Collect Render Layers"
+ sync_workfile_version = False
+
+ @staticmethod
+ def generate_expected_beauty(
+ render_product, frame_start, frame_end, frame_step, ext
+ ):
+ """
+ Generate the expected files for the render product for the beauty
+        render. This returns a list of files that should be rendered,
+        replacing each sequence of `#` with the zero-padded frame number.
+ """
+ path = os.path.dirname(render_product)
+ file = os.path.basename(render_product)
+
+ expected_files = []
+
+ for frame in range(frame_start, frame_end + 1, frame_step):
+ frame_str = str(frame).rjust(4, "0")
+ filename = re.sub("#+", frame_str, file)
+ expected_file = f"{os.path.join(path, filename)}.{ext}"
+ expected_files.append(expected_file.replace("\\", "/"))
+
+ return {
+ "beauty": expected_files
+ }
+
+ @staticmethod
+ def generate_expected_aovs(
+ aov_file_product, frame_start, frame_end, frame_step, ext
+ ):
+ """
+        Generate the expected files for each AOV render product. This
+        returns a dictionary mapping each AOV name to the list of files
+        that should be rendered, replacing each sequence of `#` with the
+        zero-padded frame number.
+ """
+ expected_files = {}
+
+ for aov_name, aov_file in aov_file_product:
+ path = os.path.dirname(aov_file)
+ file = os.path.basename(aov_file)
+
+ aov_files = []
+
+ for frame in range(frame_start, frame_end + 1, frame_step):
+ frame_str = str(frame).rjust(4, "0")
+ filename = re.sub("#+", frame_str, file)
+ expected_file = f"{os.path.join(path, filename)}.{ext}"
+ aov_files.append(expected_file.replace("\\", "/"))
+
+ expected_files[aov_name] = aov_files
+
+ return expected_files
+
+ def process(self, instance):
+ context = instance.context
+
+ render_data = bpy.data.collections[str(instance)].get("render_data")
+
+ assert render_data, "No render data found."
+
+ self.log.info(f"render_data: {dict(render_data)}")
+
+ render_product = render_data.get("render_product")
+ aov_file_product = render_data.get("aov_file_product")
+ ext = render_data.get("image_format")
+ multilayer = render_data.get("multilayer_exr")
+
+ frame_start = context.data["frameStart"]
+ frame_end = context.data["frameEnd"]
+ frame_handle_start = context.data["frameStartHandle"]
+ frame_handle_end = context.data["frameEndHandle"]
+
+ expected_beauty = self.generate_expected_beauty(
+ render_product, int(frame_start), int(frame_end),
+ int(bpy.context.scene.frame_step), ext)
+
+ expected_aovs = self.generate_expected_aovs(
+ aov_file_product, int(frame_start), int(frame_end),
+ int(bpy.context.scene.frame_step), ext)
+
+ expected_files = expected_beauty | expected_aovs
+
+ instance.data.update({
+ "family": "render.farm",
+ "frameStart": frame_start,
+ "frameEnd": frame_end,
+ "frameStartHandle": frame_handle_start,
+ "frameEndHandle": frame_handle_end,
+ "fps": context.data["fps"],
+ "byFrameStep": bpy.context.scene.frame_step,
+ "review": render_data.get("review", False),
+ "multipartExr": ext == "exr" and multilayer,
+ "farm": True,
+ "expectedFiles": [expected_files],
+            # OCIO is not currently implemented in Blender, but the following
+            # settings are required by the schema, so they are hardcoded.
+ # TODO: Implement OCIO in Blender
+ "colorspaceConfig": "",
+ "colorspaceDisplay": "sRGB",
+ "colorspaceView": "ACES 1.0 SDR-video",
+ "renderProducts": colorspace.ARenderProduct(),
+ })
+
+ self.log.info(f"data: {instance.data}")
diff --git a/openpype/hosts/blender/plugins/publish/collect_review.py b/openpype/hosts/blender/plugins/publish/collect_review.py
index d6abd9d967..3bf2e39e24 100644
--- a/openpype/hosts/blender/plugins/publish/collect_review.py
+++ b/openpype/hosts/blender/plugins/publish/collect_review.py
@@ -1,7 +1,6 @@
import bpy
import pyblish.api
-from openpype.pipeline import legacy_io
class CollectReview(pyblish.api.InstancePlugin):
@@ -30,6 +29,8 @@ class CollectReview(pyblish.api.InstancePlugin):
camera = cameras[0].name
self.log.debug(f"camera: {camera}")
+ focal_length = cameras[0].data.lens
+
         # Get the isolate-objects list from the instance's mesh members.
isolate_objects = [
obj
@@ -38,11 +39,11 @@ class CollectReview(pyblish.api.InstancePlugin):
]
if not instance.data.get("remove"):
-
- task = legacy_io.Session.get("AVALON_TASK")
+ # Store focal length in `burninDataMembers`
+ burninData = instance.data.setdefault("burninDataMembers", {})
+ burninData["focalLength"] = focal_length
instance.data.update({
- "subset": f"{task}Review",
"review_camera": camera,
"frameStart": instance.context.data["frameStart"],
"frameEnd": instance.context.data["frameEnd"],
diff --git a/openpype/hosts/blender/plugins/publish/extract_abc.py b/openpype/hosts/blender/plugins/publish/extract_abc.py
index 1cab9d225b..87159e53f0 100644
--- a/openpype/hosts/blender/plugins/publish/extract_abc.py
+++ b/openpype/hosts/blender/plugins/publish/extract_abc.py
@@ -21,34 +21,31 @@ class ExtractABC(publish.Extractor):
filename = f"{instance.name}.abc"
filepath = os.path.join(stagingdir, filename)
- context = bpy.context
- scene = context.scene
- view_layer = context.view_layer
-
# Perform extraction
self.log.info("Performing extraction..")
plugin.deselect_all()
selected = []
- asset_group = None
+ active = None
for obj in instance:
obj.select_set(True)
selected.append(obj)
+            # Set the asset group as active
if obj.get(AVALON_PROPERTY):
- asset_group = obj
+ active = obj
context = plugin.create_blender_context(
- active=asset_group, selected=selected)
+ active=active, selected=selected)
- # We export the abc
- bpy.ops.wm.alembic_export(
- context,
- filepath=filepath,
- selected=True,
- flatten=False
- )
+ with bpy.context.temp_override(**context):
+ # We export the abc
+ bpy.ops.wm.alembic_export(
+ filepath=filepath,
+ selected=True,
+ flatten=False
+ )
plugin.deselect_all()
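The extractor change above is an API migration: passing a context override
dict as a positional argument to operators is deprecated in recent Blender
releases, and `bpy.context.temp_override` (available since Blender 3.2) is the
replacement. A minimal sketch with an assumed selection:

import bpy

selected = list(bpy.context.selected_objects)
active = bpy.context.active_object

with bpy.context.temp_override(
    active_object=active, selected_objects=selected
):
    bpy.ops.wm.alembic_export(
        filepath="/tmp/out.abc",  # example output path
        selected=True,
        flatten=False,
    )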
diff --git a/openpype/hosts/blender/plugins/publish/extract_abc_animation.py b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py
index e141ccaa44..44b2ba3761 100644
--- a/openpype/hosts/blender/plugins/publish/extract_abc_animation.py
+++ b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py
@@ -20,8 +20,6 @@ class ExtractAnimationABC(publish.Extractor):
filename = f"{instance.name}.abc"
filepath = os.path.join(stagingdir, filename)
- context = bpy.context
-
# Perform extraction
self.log.info("Performing extraction..")
diff --git a/openpype/hosts/blender/plugins/publish/extract_blend.py b/openpype/hosts/blender/plugins/publish/extract_blend.py
index 6a001b6f65..d4f26b4f3c 100644
--- a/openpype/hosts/blender/plugins/publish/extract_blend.py
+++ b/openpype/hosts/blender/plugins/publish/extract_blend.py
@@ -10,7 +10,7 @@ class ExtractBlend(publish.Extractor):
label = "Extract Blend"
hosts = ["blender"]
- families = ["model", "camera", "rig", "action", "layout"]
+ families = ["model", "camera", "rig", "action", "layout", "blendScene"]
optional = True
def process(self, instance):
diff --git a/openpype/hosts/blender/plugins/publish/extract_camera_abc.py b/openpype/hosts/blender/plugins/publish/extract_camera_abc.py
new file mode 100644
index 0000000000..036be7bf3c
--- /dev/null
+++ b/openpype/hosts/blender/plugins/publish/extract_camera_abc.py
@@ -0,0 +1,68 @@
+import os
+
+import bpy
+
+from openpype.pipeline import publish
+from openpype.hosts.blender.api import plugin
+from openpype.hosts.blender.api.pipeline import AVALON_PROPERTY
+
+
+class ExtractCameraABC(publish.Extractor):
+ """Extract camera as ABC."""
+
+ label = "Extract Camera (ABC)"
+ hosts = ["blender"]
+ families = ["camera"]
+ optional = True
+
+ def process(self, instance):
+ # Define extract output file path
+ stagingdir = self.staging_dir(instance)
+ filename = f"{instance.name}.abc"
+ filepath = os.path.join(stagingdir, filename)
+
+ # Perform extraction
+ self.log.info("Performing extraction..")
+
+ plugin.deselect_all()
+
+ asset_group = None
+ for obj in instance:
+ if obj.get(AVALON_PROPERTY):
+ asset_group = obj
+ break
+ assert asset_group, "No asset group found"
+
+ # Need to cast to list because children is a tuple
+ selected = list(asset_group.children)
+ active = selected[0]
+
+ for obj in selected:
+ obj.select_set(True)
+
+ context = plugin.create_blender_context(
+ active=active, selected=selected)
+
+ with bpy.context.temp_override(**context):
+ # We export the abc
+ bpy.ops.wm.alembic_export(
+ filepath=filepath,
+ selected=True,
+ flatten=True
+ )
+
+ plugin.deselect_all()
+
+ if "representations" not in instance.data:
+ instance.data["representations"] = []
+
+ representation = {
+ 'name': 'abc',
+ 'ext': 'abc',
+ 'files': filename,
+ "stagingDir": stagingdir,
+ }
+ instance.data["representations"].append(representation)
+
+ self.log.info("Extracted instance '%s' to: %s",
+ instance.name, representation)
diff --git a/openpype/hosts/blender/plugins/publish/extract_camera.py b/openpype/hosts/blender/plugins/publish/extract_camera_fbx.py
similarity index 98%
rename from openpype/hosts/blender/plugins/publish/extract_camera.py
rename to openpype/hosts/blender/plugins/publish/extract_camera_fbx.py
index 9fd181825c..315994140e 100644
--- a/openpype/hosts/blender/plugins/publish/extract_camera.py
+++ b/openpype/hosts/blender/plugins/publish/extract_camera_fbx.py
@@ -9,7 +9,7 @@ from openpype.hosts.blender.api import plugin
class ExtractCamera(publish.Extractor):
"""Extract as the camera as FBX."""
- label = "Extract Camera"
+ label = "Extract Camera (FBX)"
hosts = ["blender"]
families = ["camera"]
optional = True
diff --git a/openpype/hosts/blender/plugins/publish/increment_workfile_version.py b/openpype/hosts/blender/plugins/publish/increment_workfile_version.py
index 963ca1398f..3d176f9c30 100644
--- a/openpype/hosts/blender/plugins/publish/increment_workfile_version.py
+++ b/openpype/hosts/blender/plugins/publish/increment_workfile_version.py
@@ -9,7 +9,8 @@ class IncrementWorkfileVersion(pyblish.api.ContextPlugin):
label = "Increment Workfile Version"
optional = True
hosts = ["blender"]
- families = ["animation", "model", "rig", "action", "layout"]
+ families = ["animation", "model", "rig", "action", "layout", "blendScene",
+ "render"]
def process(self, context):
diff --git a/openpype/hosts/blender/plugins/publish/validate_deadline_publish.py b/openpype/hosts/blender/plugins/publish/validate_deadline_publish.py
new file mode 100644
index 0000000000..14220b5c9c
--- /dev/null
+++ b/openpype/hosts/blender/plugins/publish/validate_deadline_publish.py
@@ -0,0 +1,47 @@
+import os
+
+import bpy
+
+import pyblish.api
+from openpype.pipeline.publish import (
+ RepairAction,
+ ValidateContentsOrder,
+ PublishValidationError,
+ OptionalPyblishPluginMixin
+)
+from openpype.hosts.blender.api.render_lib import prepare_rendering
+
+
+class ValidateDeadlinePublish(pyblish.api.InstancePlugin,
+ OptionalPyblishPluginMixin):
+ """Validates Render File Directory is
+ not the same in every submission
+ """
+
+ order = ValidateContentsOrder
+ families = ["render.farm"]
+ hosts = ["blender"]
+ label = "Validate Render Output for Deadline"
+ optional = True
+ actions = [RepairAction]
+
+ def process(self, instance):
+ if not self.is_active(instance.data):
+ return
+ filepath = bpy.data.filepath
+ file = os.path.basename(filepath)
+ filename, ext = os.path.splitext(file)
+ if filename not in bpy.context.scene.render.filepath:
+ raise PublishValidationError(
+ "Render output folder "
+ "doesn't match the blender scene name! "
+ "Use Repair action to "
+ "fix the folder file path.."
+ )
+
+ @classmethod
+ def repair(cls, instance):
+ container = bpy.data.collections[str(instance)]
+ prepare_rendering(container)
+ bpy.ops.wm.save_as_mainfile(filepath=bpy.data.filepath)
+ cls.log.debug("Reset the render output folder...")
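The check above is a plain substring test: the workfile's base name must
appear in the configured render output path, otherwise different workfiles
could render into the same directory and overwrite each other. Illustrated
with assumed paths:

import os

workfile = "C:/projects/sh010_lighting_v003.blend"  # bpy.data.filepath
render_path = "C:/renders/sh010_lighting_v002/"     # scene.render.filepath

filename, _ = os.path.splitext(os.path.basename(workfile))
if filename not in render_path:
    # In the plugin this raises PublishValidationError; the RepairAction
    # then calls prepare_rendering() to rebuild the output path from the
    # current workfile.
    print(f"Output path does not include the workfile name: {filename}")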
diff --git a/openpype/hosts/blender/plugins/publish/validate_file_saved.py b/openpype/hosts/blender/plugins/publish/validate_file_saved.py
new file mode 100644
index 0000000000..e191585c55
--- /dev/null
+++ b/openpype/hosts/blender/plugins/publish/validate_file_saved.py
@@ -0,0 +1,20 @@
+import bpy
+
+import pyblish.api
+
+
+class ValidateFileSaved(pyblish.api.InstancePlugin):
+ """Validate that the workfile has been saved."""
+
+ order = pyblish.api.ValidatorOrder - 0.01
+ hosts = ["blender"]
+ label = "Validate File Saved"
+ optional = False
+ exclude_families = []
+
+ def process(self, instance):
+ if [ef for ef in self.exclude_families
+ if instance.data["family"] in ef]:
+ return
+ if bpy.data.is_dirty:
+ raise RuntimeError("Workfile is not saved.")
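Note that the family exclusion above is substring-based: an instance family
matches an exclude entry whenever the family string appears inside that entry,
so an entry like "render.farm" would also skip instances of family "render".
Illustrated:

exclude_families = ["render.farm"]  # example settings value
family = "render"

# Matches because "render" is a substring of "render.farm":
skip = any(family in entry for entry in exclude_families)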
diff --git a/openpype/hosts/blender/plugins/publish/validate_render_camera_is_set.py b/openpype/hosts/blender/plugins/publish/validate_render_camera_is_set.py
new file mode 100644
index 0000000000..ba3a796f35
--- /dev/null
+++ b/openpype/hosts/blender/plugins/publish/validate_render_camera_is_set.py
@@ -0,0 +1,17 @@
+import bpy
+
+import pyblish.api
+
+
+class ValidateRenderCameraIsSet(pyblish.api.InstancePlugin):
+ """Validate that there is a camera set as active for rendering."""
+
+ order = pyblish.api.ValidatorOrder
+ hosts = ["blender"]
+ families = ["render"]
+ label = "Validate Render Camera Is Set"
+ optional = False
+
+ def process(self, instance):
+ if not bpy.context.scene.camera:
+ raise RuntimeError("No camera is active for rendering.")
diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py
index 96e784875c..83aeab7c58 100644
--- a/openpype/hosts/celaction/hooks/pre_celaction_setup.py
+++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py
@@ -2,20 +2,18 @@ import os
import shutil
import winreg
import subprocess
-from openpype.lib import PreLaunchHook, get_openpype_execute_args
-from openpype.hosts.celaction import scripts
-
-CELACTION_SCRIPTS_DIR = os.path.dirname(
- os.path.abspath(scripts.__file__)
-)
+from openpype.lib import get_openpype_execute_args
+from openpype.lib.applications import PreLaunchHook, LaunchTypes
+from openpype.hosts.celaction import CELACTION_ROOT_DIR
class CelactionPrelaunchHook(PreLaunchHook):
"""
     Bootstrap celaction with pype
"""
- app_groups = ["celaction"]
- platforms = ["windows"]
+ app_groups = {"celaction"}
+ platforms = {"windows"}
+ launch_types = {LaunchTypes.local}
def execute(self):
asset_doc = self.data["asset_doc"]
@@ -37,7 +35,9 @@ class CelactionPrelaunchHook(PreLaunchHook):
winreg.KEY_ALL_ACCESS
)
- path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py")
+ path_to_cli = os.path.join(
+ CELACTION_ROOT_DIR, "scripts", "publish_cli.py"
+ )
subprocess_args = get_openpype_execute_args("run", path_to_cli)
openpype_executable = subprocess_args.pop(0)
workfile_settings = self.get_workfile_settings()
@@ -122,9 +122,8 @@ class CelactionPrelaunchHook(PreLaunchHook):
if not os.path.exists(workfile_path):
# TODO add ability to set different template workfile path via
# settings
- openpype_celaction_dir = os.path.dirname(CELACTION_SCRIPTS_DIR)
template_path = os.path.join(
- openpype_celaction_dir,
+ CELACTION_ROOT_DIR,
"resources",
"celaction_template_scene.scn"
)
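This hook (like the Flame and Fusion hooks further below) now declares
`app_groups` and `platforms` as sets and gains a `launch_types` filter, so it
only runs for local application launches. A minimal skeleton following that
convention (the class and group name are illustrative):

from openpype.lib.applications import PreLaunchHook, LaunchTypes


class ExampleLocalPrelaunchHook(PreLaunchHook):
    """Runs only for local launches of the given application group."""

    app_groups = {"example_app"}  # illustrative application group
    platforms = {"windows"}
    launch_types = {LaunchTypes.local}

    def execute(self):
        # Inspect or modify self.launch_context here (env, args, data).
        self.log.debug("Prelaunch hook executed.")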
diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
index 35ac7fc264..c815c1edd4 100644
--- a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
+++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
@@ -1,6 +1,5 @@
import os
import pyblish.api
-from openpype.pipeline import legacy_io
class CollectCelactionInstances(pyblish.api.ContextPlugin):
@@ -10,7 +9,7 @@ class CollectCelactionInstances(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder + 0.1
def process(self, context):
- task = legacy_io.Session["AVALON_TASK"]
+ task = context.data["task"]
current_file = context.data["currentFile"]
staging_dir = os.path.dirname(current_file)
scene_file = os.path.basename(current_file)
diff --git a/openpype/hosts/flame/api/menu.py b/openpype/hosts/flame/api/menu.py
index 5f9dc57a61..e8bdf32ebd 100644
--- a/openpype/hosts/flame/api/menu.py
+++ b/openpype/hosts/flame/api/menu.py
@@ -1,7 +1,9 @@
-import os
-from qtpy import QtWidgets
from copy import deepcopy
from pprint import pformat
+
+from qtpy import QtWidgets
+
+from openpype.pipeline import get_current_project_name
from openpype.tools.utils.host_tools import HostToolsHelper
menu_group_name = 'OpenPype'
@@ -61,10 +63,10 @@ class _FlameMenuApp(object):
self.framework.prefs_global, self.name)
self.mbox = QtWidgets.QMessageBox()
-
+ project_name = get_current_project_name()
self.menu = {
"actions": [{
- 'name': os.getenv("AVALON_PROJECT", "project"),
+ 'name': project_name or "project",
'isEnabled': False
}],
"name": self.menu_group_name
diff --git a/openpype/hosts/flame/hooks/pre_flame_setup.py b/openpype/hosts/flame/hooks/pre_flame_setup.py
index 83110bb6b5..850569cfdd 100644
--- a/openpype/hosts/flame/hooks/pre_flame_setup.py
+++ b/openpype/hosts/flame/hooks/pre_flame_setup.py
@@ -6,13 +6,10 @@ import socket
from pprint import pformat
from openpype.lib import (
- PreLaunchHook,
get_openpype_username,
run_subprocess,
)
-from openpype.lib.applications import (
- ApplicationLaunchFailed
-)
+from openpype.lib.applications import PreLaunchHook, LaunchTypes
from openpype.hosts import flame as opflame
@@ -22,11 +19,12 @@ class FlamePrelaunch(PreLaunchHook):
Will make sure flame_script_dirs are copied to user's folder defined
in environment var FLAME_SCRIPT_DIR.
"""
- app_groups = ["flame"]
+ app_groups = {"flame"}
permissions = 0o777
wtc_script_path = os.path.join(
opflame.HOST_DIR, "api", "scripts", "wiretap_com.py")
+ launch_types = {LaunchTypes.local}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
diff --git a/openpype/hosts/flame/plugins/load/load_clip.py b/openpype/hosts/flame/plugins/load/load_clip.py
index dfb2d2b6f0..ca4eab0f63 100644
--- a/openpype/hosts/flame/plugins/load/load_clip.py
+++ b/openpype/hosts/flame/plugins/load/load_clip.py
@@ -48,7 +48,6 @@ class LoadClip(opfapi.ClipLoader):
self.fpd = fproject.current_workspace.desktop
# load clip to timeline and get main variables
- namespace = namespace
version = context['version']
version_data = version.get("data", {})
version_name = version.get("name", None)
@@ -82,8 +81,9 @@ class LoadClip(opfapi.ClipLoader):
os.makedirs(openclip_dir)
         # prepare clip data from context and send it to openClipLoader
+ path = self.filepath_from_context(context)
loading_context = {
- "path": self.fname.replace("\\", "/"),
+ "path": path.replace("\\", "/"),
"colorspace": colorspace,
"version": "v{:0>3}".format(version_name),
"layer_rename_template": self.layer_rename_template,
diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py
index 5c5a77f0d0..1f3a017d72 100644
--- a/openpype/hosts/flame/plugins/load/load_clip_batch.py
+++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py
@@ -45,7 +45,6 @@ class LoadClipBatch(opfapi.ClipLoader):
self.batch = options.get("batch") or flame.batch
# load clip to timeline and get main variables
- namespace = namespace
version = context['version']
version_data = version.get("data", {})
version_name = version.get("name", None)
@@ -81,9 +80,10 @@ class LoadClipBatch(opfapi.ClipLoader):
if not os.path.exists(openclip_dir):
os.makedirs(openclip_dir)
- # prepare clip data from context ad send it to openClipLoader
+ # prepare clip data from context and send it to openClipLoader
+ path = self.filepath_from_context(context)
loading_context = {
- "path": self.fname.replace("\\", "/"),
+ "path": path.replace("\\", "/"),
"colorspace": colorspace,
"version": "v{:0>3}".format(version_name),
"layer_rename_template": self.layer_rename_template,
diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py
index 23fdf5e785..e14f960a2b 100644
--- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py
+++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py
@@ -325,7 +325,6 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
def _create_shot_instance(self, context, clip_name, **data):
master_layer = data.get("heroTrack")
hierarchy_data = data.get("hierarchyData")
- asset = data.get("asset")
if not master_layer:
return
diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py
index 917041e053..f8cfa9e963 100644
--- a/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py
+++ b/openpype/hosts/flame/plugins/publish/collect_timeline_otio.py
@@ -2,7 +2,6 @@ import pyblish.api
import openpype.hosts.flame.api as opfapi
from openpype.hosts.flame.otio import flame_export
-from openpype.pipeline import legacy_io
from openpype.pipeline.create import get_subset_name
@@ -19,7 +18,7 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin):
# main
asset_doc = context.data["assetEntity"]
- task_name = legacy_io.Session["AVALON_TASK"]
+ task_name = context.data["task"]
project = opfapi.get_current_project()
sequence = opfapi.get_current_sequence(opfapi.CTX.selection)
diff --git a/openpype/hosts/fusion/addon.py b/openpype/hosts/fusion/addon.py
index 45683cfbde..8343f3c79d 100644
--- a/openpype/hosts/fusion/addon.py
+++ b/openpype/hosts/fusion/addon.py
@@ -60,8 +60,9 @@ class FusionAddon(OpenPypeModule, IHostAddon):
return []
return [os.path.join(FUSION_HOST_DIR, "hooks")]
- def add_implementation_envs(self, env, _app):
+ def add_implementation_envs(self, env, app):
         # Set default values if they are not already set via settings
+
defaults = {"OPENPYPE_LOG_NO_COLORS": "Yes"}
for key, value in defaults.items():
if not env.get(key):
diff --git a/openpype/hosts/fusion/api/__init__.py b/openpype/hosts/fusion/api/__init__.py
index dba55a98d9..aabc624016 100644
--- a/openpype/hosts/fusion/api/__init__.py
+++ b/openpype/hosts/fusion/api/__init__.py
@@ -3,9 +3,7 @@ from .pipeline import (
ls,
imprint_container,
- parse_container,
- list_instances,
- remove_instance
+ parse_container
)
from .lib import (
@@ -22,6 +20,7 @@ from .menu import launch_openpype_menu
__all__ = [
# pipeline
+ "FusionHost",
"ls",
"imprint_container",
@@ -32,6 +31,7 @@ __all__ = [
"update_frame_range",
"set_asset_framerange",
"get_current_comp",
+ "get_bmd_library",
"comp_lock_and_undo_chunk",
# menu
diff --git a/openpype/hosts/fusion/api/action.py b/openpype/hosts/fusion/api/action.py
index 347d552108..66b787c2f1 100644
--- a/openpype/hosts/fusion/api/action.py
+++ b/openpype/hosts/fusion/api/action.py
@@ -18,8 +18,10 @@ class SelectInvalidAction(pyblish.api.Action):
icon = "search" # Icon from Awesome Icon
def process(self, context, plugin):
- errored_instances = get_errored_instances_from_context(context,
- plugin=plugin)
+ errored_instances = get_errored_instances_from_context(
+ context,
+ plugin=plugin,
+ )
# Get the invalid nodes for the plug-ins
self.log.info("Finding invalid nodes..")
@@ -51,6 +53,7 @@ class SelectInvalidAction(pyblish.api.Action):
names = set()
for tool in invalid:
flow.Select(tool, True)
+ comp.SetActiveTool(tool)
names.add(tool.Name)
self.log.info(
"Selecting invalid tools: %s" % ", ".join(sorted(names))
diff --git a/openpype/hosts/fusion/api/lib.py b/openpype/hosts/fusion/api/lib.py
index cba8c38c2f..c4a1488606 100644
--- a/openpype/hosts/fusion/api/lib.py
+++ b/openpype/hosts/fusion/api/lib.py
@@ -14,7 +14,7 @@ from openpype.client import (
)
from openpype.pipeline import (
switch_container,
- legacy_io,
+ get_current_project_name,
)
from openpype.pipeline.context_tools import get_current_project_asset
@@ -181,80 +181,6 @@ def validate_comp_prefs(comp=None, force_repair=False):
dialog.setStyleSheet(load_stylesheet())
-def switch_item(container,
- asset_name=None,
- subset_name=None,
- representation_name=None):
- """Switch container asset, subset or representation of a container by name.
-
- It'll always switch to the latest version - of course a different
- approach could be implemented.
-
- Args:
- container (dict): data of the item to switch with
- asset_name (str): name of the asset
- subset_name (str): name of the subset
- representation_name (str): name of the representation
-
- Returns:
- dict
-
- """
-
- if all(not x for x in [asset_name, subset_name, representation_name]):
- raise ValueError("Must have at least one change provided to switch.")
-
- # Collect any of current asset, subset and representation if not provided
- # so we can use the original name from those.
- project_name = legacy_io.active_project()
- if any(not x for x in [asset_name, subset_name, representation_name]):
- repre_id = container["representation"]
- representation = get_representation_by_id(project_name, repre_id)
- repre_parent_docs = get_representation_parents(
- project_name, representation)
- if repre_parent_docs:
- version, subset, asset, _ = repre_parent_docs
- else:
- version = subset = asset = None
-
- if asset_name is None:
- asset_name = asset["name"]
-
- if subset_name is None:
- subset_name = subset["name"]
-
- if representation_name is None:
- representation_name = representation["name"]
-
- # Find the new one
- asset = get_asset_by_name(project_name, asset_name, fields=["_id"])
- assert asset, ("Could not find asset in the database with the name "
- "'%s'" % asset_name)
-
- subset = get_subset_by_name(
- project_name, subset_name, asset["_id"], fields=["_id"]
- )
- assert subset, ("Could not find subset in the database with the name "
- "'%s'" % subset_name)
-
- version = get_last_version_by_subset_id(
- project_name, subset["_id"], fields=["_id"]
- )
- assert version, "Could not find a version for {}.{}".format(
- asset_name, subset_name
- )
-
- representation = get_representation_by_name(
- project_name, representation_name, version["_id"]
- )
- assert representation, ("Could not find representation in the database "
- "with the name '%s'" % representation_name)
-
- switch_container(container, representation)
-
- return representation
-
-
@contextlib.contextmanager
def maintained_selection(comp=None):
"""Reset comp selection from before the context after the context"""
diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py
index 92f38a64c2..50250a6656 100644
--- a/openpype/hosts/fusion/api/menu.py
+++ b/openpype/hosts/fusion/api/menu.py
@@ -12,7 +12,7 @@ from openpype.hosts.fusion.api.lib import (
set_asset_framerange,
set_asset_resolution,
)
-from openpype.pipeline import legacy_io
+from openpype.pipeline import get_current_asset_name
from openpype.resources import get_openpype_icon_filepath
from .pipeline import FusionEventHandler
@@ -125,7 +125,7 @@ class OpenPypeMenu(QtWidgets.QWidget):
def on_task_changed(self):
# Update current context label
- label = legacy_io.Session["AVALON_ASSET"]
+ label = get_current_asset_name()
self.asset_label.setText(label)
def register_callback(self, name, fn):
diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py
index a768a3f0f8..a886086758 100644
--- a/openpype/hosts/fusion/api/pipeline.py
+++ b/openpype/hosts/fusion/api/pipeline.py
@@ -287,49 +287,6 @@ def parse_container(tool):
return container
-# TODO: Function below is currently unused prototypes
-def list_instances(creator_id=None):
- """Return created instances in current workfile which will be published.
- Returns:
- (list) of dictionaries matching instances format
- """
-
- comp = get_current_comp()
- tools = comp.GetToolList(False).values()
-
- instance_signature = {
- "id": "pyblish.avalon.instance",
- "identifier": creator_id
- }
- instances = []
- for tool in tools:
-
- data = tool.GetData('openpype')
- if not isinstance(data, dict):
- continue
-
- if data.get("id") != instance_signature["id"]:
- continue
-
- if creator_id and data.get("identifier") != creator_id:
- continue
-
- instances.append(tool)
-
- return instances
-
-
-# TODO: Function below is currently unused prototypes
-def remove_instance(instance):
- """Remove instance from current workfile.
-
- Args:
- instance (dict): instance representation from subsetmanager model
- """
- # Assume instance is a Fusion tool directly
- instance["tool"].Delete()
-
-
class FusionEventThread(QtCore.QThread):
"""QThread which will periodically ping Fusion app for any events.
The fusion.UIManager must be set up to be notified of events before they'll
diff --git a/openpype/hosts/fusion/deploy/MenuScripts/openpype_menu.py b/openpype/hosts/fusion/deploy/MenuScripts/openpype_menu.py
index 685e58d58f..1c58ee50e4 100644
--- a/openpype/hosts/fusion/deploy/MenuScripts/openpype_menu.py
+++ b/openpype/hosts/fusion/deploy/MenuScripts/openpype_menu.py
@@ -1,6 +1,19 @@
import os
import sys
+if sys.version_info < (3, 7):
+ # hack to handle discrepancy between distributed libraries and Python 3.6
+    # mostly because of a wrong version of urllib3
+ # TODO remove when not necessary
+ from openpype import PACKAGE_DIR
+ FUSION_HOST_DIR = os.path.join(PACKAGE_DIR, "hosts", "fusion")
+
+ vendor_path = os.path.join(FUSION_HOST_DIR, "vendor")
+ if vendor_path not in sys.path:
+ sys.path.insert(0, vendor_path)
+
+ print(f"Added vendorized libraries from {vendor_path}")
+
from openpype.lib import Logger
from openpype.pipeline import (
install_host,
diff --git a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/backgrounds_selected_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/backgrounds_selected_to32bit.py
deleted file mode 100644
index 1a0a9911ea..0000000000
--- a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/backgrounds_selected_to32bit.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from openpype.hosts.fusion.api import (
- comp_lock_and_undo_chunk,
- get_current_comp
-)
-
-
-def main():
-    """Set all selected backgrounds to 32 bit"""
-    comp = get_current_comp()
- with comp_lock_and_undo_chunk(comp, 'Selected Backgrounds to 32bit'):
- tools = comp.GetToolList(True, "Background").values()
- for tool in tools:
- tool.Depth = 5
-
-
-main()
diff --git a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/backgrounds_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/backgrounds_to32bit.py
deleted file mode 100644
index c2eea505e5..0000000000
--- a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/backgrounds_to32bit.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from openpype.hosts.fusion.api import (
- comp_lock_and_undo_chunk,
- get_current_comp
-)
-
-
-def main():
-    """Set all backgrounds to 32 bit"""
-    comp = get_current_comp()
- with comp_lock_and_undo_chunk(comp, 'Backgrounds to 32bit'):
- tools = comp.GetToolList(False, "Background").values()
- for tool in tools:
- tool.Depth = 5
-
-
-main()
diff --git a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/loaders_selected_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/loaders_selected_to32bit.py
deleted file mode 100644
index 2118767f4d..0000000000
--- a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/loaders_selected_to32bit.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from openpype.hosts.fusion.api import (
- comp_lock_and_undo_chunk,
- get_current_comp
-)
-
-
-def main():
-    """Set all selected loaders to 32 bit"""
-    comp = get_current_comp()
- with comp_lock_and_undo_chunk(comp, 'Selected Loaders to 32bit'):
- tools = comp.GetToolList(True, "Loader").values()
- for tool in tools:
- tool.Depth = 5
-
-
-main()
diff --git a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/loaders_to32bit.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/loaders_to32bit.py
deleted file mode 100644
index 7dd1f66a5e..0000000000
--- a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/32bit/loaders_to32bit.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from openpype.hosts.fusion.api import (
- comp_lock_and_undo_chunk,
- get_current_comp
-)
-
-
-def main():
-    """Set all loaders to 32 bit"""
-    comp = get_current_comp()
- with comp_lock_and_undo_chunk(comp, 'Loaders to 32bit'):
- tools = comp.GetToolList(False, "Loader").values()
- for tool in tools:
- tool.Depth = 5
-
-
-main()
diff --git a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py
deleted file mode 100644
index f08dc0bf2c..0000000000
--- a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py
+++ /dev/null
@@ -1,200 +0,0 @@
-import os
-import sys
-import glob
-import logging
-
-from qtpy import QtWidgets, QtCore
-
-import qtawesome as qta
-
-from openpype.client import get_assets
-from openpype import style
-from openpype.pipeline import (
- install_host,
- legacy_io,
-)
-from openpype.hosts.fusion import api
-from openpype.pipeline.context_tools import get_workdir_from_session
-
-log = logging.getLogger("Fusion Switch Shot")
-
-
-class App(QtWidgets.QWidget):
-
- def __init__(self, parent=None):
-
- ################################################
- # |---------------------| |------------------| #
- # |Comp | |Asset | #
- # |[..][ v]| |[ v]| #
- # |---------------------| |------------------| #
- # | Update existing comp [ ] | #
- # |------------------------------------------| #
- # | Switch | #
- # |------------------------------------------| #
- ################################################
-
- QtWidgets.QWidget.__init__(self, parent)
-
- layout = QtWidgets.QVBoxLayout()
-
- # Comp related input
- comp_hlayout = QtWidgets.QHBoxLayout()
- comp_label = QtWidgets.QLabel("Comp file")
- comp_label.setFixedWidth(50)
- comp_box = QtWidgets.QComboBox()
-
- button_icon = qta.icon("fa.folder", color="white")
- open_from_dir = QtWidgets.QPushButton()
- open_from_dir.setIcon(button_icon)
-
- comp_box.setFixedHeight(25)
- open_from_dir.setFixedWidth(25)
- open_from_dir.setFixedHeight(25)
-
- comp_hlayout.addWidget(comp_label)
- comp_hlayout.addWidget(comp_box)
- comp_hlayout.addWidget(open_from_dir)
-
- # Asset related input
- asset_hlayout = QtWidgets.QHBoxLayout()
- asset_label = QtWidgets.QLabel("Shot")
- asset_label.setFixedWidth(50)
-
- asset_box = QtWidgets.QComboBox()
- asset_box.setLineEdit(QtWidgets.QLineEdit())
- asset_box.setFixedHeight(25)
-
- refresh_icon = qta.icon("fa.refresh", color="white")
- refresh_btn = QtWidgets.QPushButton()
- refresh_btn.setIcon(refresh_icon)
-
- asset_box.setFixedHeight(25)
- refresh_btn.setFixedWidth(25)
- refresh_btn.setFixedHeight(25)
-
- asset_hlayout.addWidget(asset_label)
- asset_hlayout.addWidget(asset_box)
- asset_hlayout.addWidget(refresh_btn)
-
- # Options
- options = QtWidgets.QHBoxLayout()
- options.setAlignment(QtCore.Qt.AlignLeft)
-
- current_comp_check = QtWidgets.QCheckBox()
- current_comp_check.setChecked(True)
- current_comp_label = QtWidgets.QLabel("Use current comp")
-
- options.addWidget(current_comp_label)
- options.addWidget(current_comp_check)
-
- accept_btn = QtWidgets.QPushButton("Switch")
-
- layout.addLayout(options)
- layout.addLayout(comp_hlayout)
- layout.addLayout(asset_hlayout)
- layout.addWidget(accept_btn)
-
- self._open_from_dir = open_from_dir
- self._comps = comp_box
- self._assets = asset_box
- self._use_current = current_comp_check
- self._accept_btn = accept_btn
- self._refresh_btn = refresh_btn
-
- self.setWindowTitle("Fusion Switch Shot")
- self.setLayout(layout)
-
- self.resize(260, 140)
- self.setMinimumWidth(260)
- self.setFixedHeight(140)
-
- self.connections()
-
- # Update ui to correct state
- self._on_use_current_comp()
- self._refresh()
-
- def connections(self):
- self._use_current.clicked.connect(self._on_use_current_comp)
- self._open_from_dir.clicked.connect(self._on_open_from_dir)
- self._refresh_btn.clicked.connect(self._refresh)
- self._accept_btn.clicked.connect(self._on_switch)
-
- def _on_use_current_comp(self):
- state = self._use_current.isChecked()
- self._open_from_dir.setEnabled(not state)
- self._comps.setEnabled(not state)
-
- def _on_open_from_dir(self):
-
- start_dir = get_workdir_from_session()
- comp_file, _ = QtWidgets.QFileDialog.getOpenFileName(
- self, "Choose comp", start_dir)
-
- if not comp_file:
- return
-
- # Create completer
- self.populate_comp_box([comp_file])
- self._refresh()
-
- def _refresh(self):
- # Clear any existing items
- self._assets.clear()
-
- asset_names = self.collect_asset_names()
- completer = QtWidgets.QCompleter(asset_names)
-
- self._assets.setCompleter(completer)
- self._assets.addItems(asset_names)
-
- def _on_switch(self):
-
- if not self._use_current.isChecked():
- file_name = self._comps.itemData(self._comps.currentIndex())
- else:
- comp = api.get_current_comp()
- file_name = comp.GetAttrs("COMPS_FileName")
-
- asset = self._assets.currentText()
-
- import colorbleed.scripts.fusion_switch_shot as switch_shot
- switch_shot.switch(asset_name=asset, filepath=file_name, new=True)
-
- def collect_slap_comps(self, directory):
- items = glob.glob("{}/*.comp".format(directory))
- return items
-
- def collect_asset_names(self):
- project_name = legacy_io.active_project()
- asset_docs = get_assets(project_name, fields=["name"])
- asset_names = {
- asset_doc["name"]
- for asset_doc in asset_docs
- }
- return list(asset_names)
-
- def populate_comp_box(self, files):
- """Ensure we display the filename only but the path is stored as well
-
- Args:
- files (list): list of full file path [path/to/item/item.ext,]
-
- Returns:
- None
- """
-
- for f in files:
- filename = os.path.basename(f)
- self._comps.addItem(filename, userData=f)
-
-
-if __name__ == '__main__':
- install_host(api)
-
- app = QtWidgets.QApplication(sys.argv)
- window = App()
- window.setStyleSheet(style.load_stylesheet())
- window.show()
- sys.exit(app.exec_())
diff --git a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/update_loader_ranges.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/update_loader_ranges.py
deleted file mode 100644
index 3d2d1ecfa6..0000000000
--- a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/update_loader_ranges.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""Forces Fusion to 'retrigger' the Loader to update.
-
-Warning:
- This might change settings like 'Reverse', 'Loop', trims and other
- settings of the Loader. So use this at your own risk.
-
-"""
-from openpype.hosts.fusion.api.pipeline import (
- get_current_comp,
- comp_lock_and_undo_chunk
-)
-
-
-def update_loader_ranges():
- comp = get_current_comp()
- with comp_lock_and_undo_chunk(comp, "Reload clip time ranges"):
- tools = comp.GetToolList(True, "Loader").values()
- for tool in tools:
-
- # Get tool attributes
- tool_a = tool.GetAttrs()
- clipTable = tool_a['TOOLST_Clip_Name']
- altclipTable = tool_a['TOOLST_AltClip_Name']
- startTime = tool_a['TOOLNT_Clip_Start']
- old_global_in = tool.GlobalIn[comp.CurrentTime]
-
- # Reapply
- for index, _ in clipTable.items():
- time = startTime[index]
- tool.Clip[time] = tool.Clip[time]
-
- for index, _ in altclipTable.items():
- time = startTime[index]
- tool.ProxyFilename[time] = tool.ProxyFilename[time]
-
- tool.GlobalIn[comp.CurrentTime] = old_global_in
-
-
-if __name__ == '__main__':
- update_loader_ranges()
diff --git a/openpype/hosts/fusion/deploy/fusion_shared.prefs b/openpype/hosts/fusion/deploy/fusion_shared.prefs
index b379ea7c66..93b08aa886 100644
--- a/openpype/hosts/fusion/deploy/fusion_shared.prefs
+++ b/openpype/hosts/fusion/deploy/fusion_shared.prefs
@@ -5,7 +5,7 @@ Global = {
Map = {
["OpenPype:"] = "$(OPENPYPE_FUSION)/deploy",
["Config:"] = "UserPaths:Config;OpenPype:Config",
- ["Scripts:"] = "UserPaths:Scripts;Reactor:System/Scripts;OpenPype:Scripts",
+ ["Scripts:"] = "UserPaths:Scripts;Reactor:System/Scripts",
},
},
Script = {
diff --git a/openpype/hosts/fusion/hooks/pre_fusion_profile_hook.py b/openpype/hosts/fusion/hooks/pre_fusion_profile_hook.py
index fd726ccda1..66b0f803aa 100644
--- a/openpype/hosts/fusion/hooks/pre_fusion_profile_hook.py
+++ b/openpype/hosts/fusion/hooks/pre_fusion_profile_hook.py
@@ -2,12 +2,16 @@ import os
import shutil
import platform
from pathlib import Path
-from openpype.lib import PreLaunchHook, ApplicationLaunchFailed
from openpype.hosts.fusion import (
FUSION_HOST_DIR,
FUSION_VERSIONS_DICT,
get_fusion_version,
)
+from openpype.lib.applications import (
+ PreLaunchHook,
+ LaunchTypes,
+ ApplicationLaunchFailed,
+)
class FusionCopyPrefsPrelaunch(PreLaunchHook):
@@ -21,8 +25,9 @@ class FusionCopyPrefsPrelaunch(PreLaunchHook):
Master.prefs is defined in openpype/hosts/fusion/deploy/fusion_shared.prefs
"""
- app_groups = ["fusion"]
+ app_groups = {"fusion"}
order = 2
+ launch_types = {LaunchTypes.local}
def get_fusion_profile_name(self, profile_version) -> str:
# Returns 'Default', unless FUSION16_PROFILE is set
diff --git a/openpype/hosts/fusion/hooks/pre_fusion_setup.py b/openpype/hosts/fusion/hooks/pre_fusion_setup.py
index f27cd1674b..576628e876 100644
--- a/openpype/hosts/fusion/hooks/pre_fusion_setup.py
+++ b/openpype/hosts/fusion/hooks/pre_fusion_setup.py
@@ -1,5 +1,9 @@
import os
-from openpype.lib import PreLaunchHook, ApplicationLaunchFailed
+from openpype.lib.applications import (
+ PreLaunchHook,
+ LaunchTypes,
+ ApplicationLaunchFailed,
+)
from openpype.hosts.fusion import (
FUSION_HOST_DIR,
FUSION_VERSIONS_DICT,
@@ -17,8 +21,9 @@ class FusionPrelaunch(PreLaunchHook):
Fusion 18 : Python 3.6 - 3.10
"""
- app_groups = ["fusion"]
+ app_groups = {"fusion"}
order = 1
+ launch_types = {LaunchTypes.local}
def execute(self):
# making sure python 3 is installed at provided path
diff --git a/openpype/hosts/fusion/plugins/create/create_saver.py b/openpype/hosts/fusion/plugins/create/create_saver.py
index 04898d0a45..4564880b50 100644
--- a/openpype/hosts/fusion/plugins/create/create_saver.py
+++ b/openpype/hosts/fusion/plugins/create/create_saver.py
@@ -30,10 +30,6 @@ class CreateSaver(NewCreator):
instance_attributes = [
"reviewable"
]
- default_variants = [
- "Main",
- "Mask"
- ]
# TODO: This should be renamed together with Nuke so it is aligned
temp_rendering_path_template = (
@@ -127,6 +123,9 @@ class CreateSaver(NewCreator):
def _imprint(self, tool, data):
# Save all data in a "openpype.{key}" = value data
+ # Instance id is the tool's name so we don't need to imprint as data
+ data.pop("instance_id", None)
+
active = data.pop("active", None)
if active is not None:
# Use active value to set the passthrough state
@@ -166,7 +165,8 @@ class CreateSaver(NewCreator):
filepath = self.temp_rendering_path_template.format(
**formatting_data)
- tool["Clip"] = os.path.normpath(filepath)
+ comp = get_current_comp()
+ tool["Clip"] = comp.ReverseMapPath(os.path.normpath(filepath))
# Rename tool
if tool.Name != subset:
@@ -192,6 +192,10 @@ class CreateSaver(NewCreator):
passthrough = attrs["TOOLB_PassThrough"]
data["active"] = not passthrough
+ # Override publisher's UUID generation because tool names are
+ # already unique in Fusion in a comp
+ data["instance_id"] = tool.Name
+
return data
def get_pre_create_attr_defs(self):
@@ -250,11 +254,7 @@ class CreateSaver(NewCreator):
label="Review",
)
- def apply_settings(
- self,
- project_settings,
- system_settings
- ):
+ def apply_settings(self, project_settings):
"""Method called on initialization of plugin to apply settings."""
# plugin settings
diff --git a/openpype/hosts/fusion/plugins/create/create_workfile.py b/openpype/hosts/fusion/plugins/create/create_workfile.py
index 40721ea88a..8acaaa172f 100644
--- a/openpype/hosts/fusion/plugins/create/create_workfile.py
+++ b/openpype/hosts/fusion/plugins/create/create_workfile.py
@@ -5,7 +5,6 @@ from openpype.client import get_asset_by_name
from openpype.pipeline import (
AutoCreator,
CreatedInstance,
- legacy_io,
)
@@ -64,10 +63,10 @@ class FusionWorkfileCreator(AutoCreator):
existing_instance = instance
break
- project_name = legacy_io.Session["AVALON_PROJECT"]
- asset_name = legacy_io.Session["AVALON_ASSET"]
- task_name = legacy_io.Session["AVALON_TASK"]
- host_name = legacy_io.Session["AVALON_APP"]
+ project_name = self.create_context.get_current_project_name()
+ asset_name = self.create_context.get_current_asset_name()
+ task_name = self.create_context.get_current_task_name()
+ host_name = self.create_context.host_name
if existing_instance is None:
asset_doc = get_asset_by_name(project_name, asset_name)
diff --git a/openpype/hosts/fusion/plugins/load/load_alembic.py b/openpype/hosts/fusion/plugins/load/load_alembic.py
index 11bf59af12..9b6d1e12b4 100644
--- a/openpype/hosts/fusion/plugins/load/load_alembic.py
+++ b/openpype/hosts/fusion/plugins/load/load_alembic.py
@@ -32,7 +32,7 @@ class FusionLoadAlembicMesh(load.LoaderPlugin):
comp = get_current_comp()
with comp_lock_and_undo_chunk(comp, "Create tool"):
- path = self.fname
+ path = self.filepath_from_context(context)
args = (-32768, -32768)
tool = comp.AddTool(self.tool_type, *args)
diff --git a/openpype/hosts/fusion/plugins/load/load_fbx.py b/openpype/hosts/fusion/plugins/load/load_fbx.py
index c73ad78394..d15d2c33d7 100644
--- a/openpype/hosts/fusion/plugins/load/load_fbx.py
+++ b/openpype/hosts/fusion/plugins/load/load_fbx.py
@@ -45,7 +45,7 @@ class FusionLoadFBXMesh(load.LoaderPlugin):
# Create the Loader with the filename path set
comp = get_current_comp()
with comp_lock_and_undo_chunk(comp, "Create tool"):
- path = self.fname
+ path = self.filepath_from_context(context)
args = (-32768, -32768)
tool = comp.AddTool(self.tool_type, *args)
diff --git a/openpype/hosts/fusion/plugins/load/load_sequence.py b/openpype/hosts/fusion/plugins/load/load_sequence.py
index 552e282587..4401af97eb 100644
--- a/openpype/hosts/fusion/plugins/load/load_sequence.py
+++ b/openpype/hosts/fusion/plugins/load/load_sequence.py
@@ -1,10 +1,7 @@
import contextlib
import openpype.pipeline.load as load
-from openpype.pipeline.load import (
- get_representation_context,
- get_representation_path_from_context,
-)
+from openpype.pipeline.load import get_representation_context
from openpype.hosts.fusion.api import (
imprint_container,
get_current_comp,
@@ -157,14 +154,14 @@ class FusionLoadSequence(load.LoaderPlugin):
namespace = context["asset"]["name"]
# Use the first file for now
- path = get_representation_path_from_context(context)
+ path = self.filepath_from_context(context)
# Create the Loader with the filename path set
comp = get_current_comp()
with comp_lock_and_undo_chunk(comp, "Create Loader"):
args = (-32768, -32768)
tool = comp.AddTool("Loader", *args)
- tool["Clip"] = path
+ tool["Clip"] = comp.ReverseMapPath(path)
# Set global in point to start frame (if in version.data)
start = self._get_start(context["version"], tool)
@@ -228,7 +225,7 @@ class FusionLoadSequence(load.LoaderPlugin):
comp = tool.Comp()
context = get_representation_context(representation)
- path = get_representation_path_from_context(context)
+ path = self.filepath_from_context(context)
# Get start frame from version data
start = self._get_start(context["version"], tool)
@@ -247,7 +244,7 @@ class FusionLoadSequence(load.LoaderPlugin):
"TimeCodeOffset",
),
):
- tool["Clip"] = path
+ tool["Clip"] = comp.ReverseMapPath(path)
# Set the global in to the start frame of the sequence
global_in_changed = loader_shift(tool, start, relative=False)
diff --git a/openpype/hosts/fusion/plugins/load/load_workfile.py b/openpype/hosts/fusion/plugins/load/load_workfile.py
index b49d104a15..14e36ca8fd 100644
--- a/openpype/hosts/fusion/plugins/load/load_workfile.py
+++ b/openpype/hosts/fusion/plugins/load/load_workfile.py
@@ -27,6 +27,7 @@ class FusionLoadWorkfile(load.LoaderPlugin):
# Get needed elements
bmd = get_bmd_library()
comp = get_current_comp()
+ path = self.filepath_from_context(context)
# Paste the content of the file into the current comp
- comp.Paste(bmd.readfile(self.fname))
+ comp.Paste(bmd.readfile(path))
diff --git a/openpype/hosts/fusion/plugins/publish/collect_instances.py b/openpype/hosts/fusion/plugins/publish/collect_instances.py
index 6016baa2a9..4d6da79b77 100644
--- a/openpype/hosts/fusion/plugins/publish/collect_instances.py
+++ b/openpype/hosts/fusion/plugins/publish/collect_instances.py
@@ -85,5 +85,5 @@ class CollectInstanceData(pyblish.api.InstancePlugin):
# Add review family if the instance is marked as 'review'
# This could be done through a 'review' Creator attribute.
if instance.data.get("review", False):
- self.log.info("Adding review family..")
+ self.log.debug("Adding review family..")
instance.data["families"].append("review")
diff --git a/openpype/hosts/fusion/plugins/publish/collect_render.py b/openpype/hosts/fusion/plugins/publish/collect_render.py
index a20a142701..a7daa0b64c 100644
--- a/openpype/hosts/fusion/plugins/publish/collect_render.py
+++ b/openpype/hosts/fusion/plugins/publish/collect_render.py
@@ -108,7 +108,6 @@ class CollectFusionRender(
fam = "render.farm"
if fam not in instance.families:
instance.families.append(fam)
- instance.toBeRenderedOn = "deadline"
instance.farm = True # to skip integrate
if "review" in instance.families:
# to skip ExtractReview locally
@@ -146,9 +145,11 @@ class CollectFusionRender(
start = render_instance.frameStart - render_instance.handleStart
end = render_instance.frameEnd + render_instance.handleEnd
- path = (
- render_instance.tool["Clip"]
- [render_instance.workfileComp.TIME_UNDEFINED]
+ comp = render_instance.workfileComp
+ path = comp.MapPath(
+ render_instance.tool["Clip"][
+ render_instance.workfileComp.TIME_UNDEFINED
+ ]
)
output_dir = os.path.dirname(path)
render_instance.outputDir = output_dir
diff --git a/openpype/hosts/fusion/vendor/attr/__init__.py b/openpype/hosts/fusion/vendor/attr/__init__.py
new file mode 100644
index 0000000000..b1ce7fe248
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/__init__.py
@@ -0,0 +1,78 @@
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+from functools import partial
+
+from . import converters, exceptions, filters, setters, validators
+from ._cmp import cmp_using
+from ._config import get_run_validators, set_run_validators
+from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
+from ._make import (
+ NOTHING,
+ Attribute,
+ Factory,
+ attrib,
+ attrs,
+ fields,
+ fields_dict,
+ make_class,
+ validate,
+)
+from ._version_info import VersionInfo
+
+
+__version__ = "21.2.0"
+__version_info__ = VersionInfo._from_version_string(__version__)
+
+__title__ = "attrs"
+__description__ = "Classes Without Boilerplate"
+__url__ = "https://www.attrs.org/"
+__uri__ = __url__
+__doc__ = __description__ + " <" + __uri__ + ">"
+
+__author__ = "Hynek Schlawack"
+__email__ = "hs@ox.cx"
+
+__license__ = "MIT"
+__copyright__ = "Copyright (c) 2015 Hynek Schlawack"
+
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
+
+__all__ = [
+ "Attribute",
+ "Factory",
+ "NOTHING",
+ "asdict",
+ "assoc",
+ "astuple",
+ "attr",
+ "attrib",
+ "attributes",
+ "attrs",
+ "cmp_using",
+ "converters",
+ "evolve",
+ "exceptions",
+ "fields",
+ "fields_dict",
+ "filters",
+ "get_run_validators",
+ "has",
+ "ib",
+ "make_class",
+ "resolve_types",
+ "s",
+ "set_run_validators",
+ "setters",
+ "validate",
+ "validators",
+]
+
+if sys.version_info[:2] >= (3, 6):
+ from ._next_gen import define, field, frozen, mutable
+
+    __all__.extend(("define", "field", "frozen", "mutable"))
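+
+# Editor's note: a minimal usage sketch of the public names above, assuming
+# this vendored package is importable as ``attr`` (``Point`` is illustrative,
+# not part of this codebase):
+#
+#     import attr
+#
+#     @attr.s
+#     class Point(object):
+#         x = attr.ib(default=0)
+#         y = attr.ib(default=0)
+#
+#     attr.asdict(Point(1, 2))  # -> {'x': 1, 'y': 2}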
diff --git a/openpype/hosts/fusion/vendor/attr/__init__.pyi b/openpype/hosts/fusion/vendor/attr/__init__.pyi
new file mode 100644
index 0000000000..3503b073b4
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/__init__.pyi
@@ -0,0 +1,475 @@
+import sys
+
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Generic,
+ List,
+ Mapping,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ overload,
+)
+
+# `import X as X` is required to make these public
+from . import converters as converters
+from . import exceptions as exceptions
+from . import filters as filters
+from . import setters as setters
+from . import validators as validators
+from ._version_info import VersionInfo
+
+
+__version__: str
+__version_info__: VersionInfo
+__title__: str
+__description__: str
+__url__: str
+__uri__: str
+__author__: str
+__email__: str
+__license__: str
+__copyright__: str
+
+_T = TypeVar("_T")
+_C = TypeVar("_C", bound=type)
+
+_EqOrderType = Union[bool, Callable[[Any], Any]]
+_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
+_ConverterType = Callable[[Any], Any]
+_FilterType = Callable[[Attribute[_T], _T], bool]
+_ReprType = Callable[[Any], str]
+_ReprArgType = Union[bool, _ReprType]
+_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
+_OnSetAttrArgType = Union[
+ _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
+]
+_FieldTransformer = Callable[[type, List[Attribute[Any]]], List[Attribute[Any]]]
+# FIXME: in reality, if multiple validators are passed they must be in a list
+# or tuple, but those are invariant and so would prevent subtypes of
+# _ValidatorType from working when passed in a list or tuple.
+_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
+
+# _make --
+
+NOTHING: object
+
+# NOTE: Factory lies about its return type to make this possible:
+# `x: List[int] # = Factory(list)`
+# Work around mypy issue #4554 in the common case by using an overload.
+if sys.version_info >= (3, 8):
+ from typing import Literal
+
+ @overload
+ def Factory(factory: Callable[[], _T]) -> _T: ...
+ @overload
+ def Factory(
+ factory: Callable[[Any], _T],
+ takes_self: Literal[True],
+ ) -> _T: ...
+ @overload
+ def Factory(
+ factory: Callable[[], _T],
+ takes_self: Literal[False],
+ ) -> _T: ...
+else:
+ @overload
+ def Factory(factory: Callable[[], _T]) -> _T: ...
+ @overload
+ def Factory(
+ factory: Union[Callable[[Any], _T], Callable[[], _T]],
+ takes_self: bool = ...,
+ ) -> _T: ...
+
+# Static type inference support via __dataclass_transform__ implemented as per:
+# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
+# This annotation must be applied to all overloads of "define" and "attrs"
+#
+# NOTE: This is a typing construct and does not exist at runtime. Extensions
+# wrapping attrs decorators should declare a separate __dataclass_transform__
+# signature in the extension module using the specification linked above to
+# provide pyright support.
+def __dataclass_transform__(
+ *,
+ eq_default: bool = True,
+ order_default: bool = False,
+ kw_only_default: bool = False,
+ field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
+) -> Callable[[_T], _T]: ...
+
+class Attribute(Generic[_T]):
+ name: str
+ default: Optional[_T]
+ validator: Optional[_ValidatorType[_T]]
+ repr: _ReprArgType
+ cmp: _EqOrderType
+ eq: _EqOrderType
+ order: _EqOrderType
+ hash: Optional[bool]
+ init: bool
+ converter: Optional[_ConverterType]
+ metadata: Dict[Any, Any]
+ type: Optional[Type[_T]]
+ kw_only: bool
+ on_setattr: _OnSetAttrType
+
+ def evolve(self, **changes: Any) -> "Attribute[Any]": ...
+
+# NOTE: We had several choices for the annotation to use for type arg:
+# 1) Type[_T]
+# - Pros: Handles simple cases correctly
+# - Cons: Might produce less informative errors in the case of conflicting
+# TypeVars e.g. `attr.ib(default='bad', type=int)`
+# 2) Callable[..., _T]
+# - Pros: Better error messages than #1 for conflicting TypeVars
+# - Cons: Terrible error messages for validator checks.
+# e.g. attr.ib(type=int, validator=validate_str)
+# -> error: Cannot infer function type argument
+# 3) type (and do all of the work in the mypy plugin)
+# - Pros: Simple here, and we could customize the plugin with our own errors.
+# - Cons: Would need to write mypy plugin code to handle all the cases.
+# We chose option #1.
+
+# `attr` lies about its return type to make the following possible:
+# attr() -> Any
+# attr(8) -> int
+# attr(validator=) -> Whatever the callable expects.
+# This makes these types of assignments possible:
+# x: int = attr(8)
+#
+# This form catches explicit None or no default but with no other arguments
+# returns Any.
+@overload
+def attrib(
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: None = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def attrib(
+ default: None = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: Optional[Type[_T]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def attrib(
+ default: _T,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: Optional[Type[_T]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def attrib(
+ default: Optional[_T] = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ type: object = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def field(
+ *,
+ default: _T,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def field(
+ *,
+ default: Optional[_T] = ...,
+ validator: Optional[_ValidatorArgType[_T]] = ...,
+ repr: _ReprArgType = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ metadata: Optional[Mapping[Any, Any]] = ...,
+ converter: Optional[_ConverterType] = ...,
+ factory: Optional[Callable[[], _T]] = ...,
+ kw_only: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+) -> Any: ...
+@overload
+@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
+def attrs(
+ maybe_cls: _C,
+ these: Optional[Dict[str, Any]] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ auto_detect: bool = ...,
+ collect_by_mro: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+) -> _C: ...
+@overload
+@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
+def attrs(
+ maybe_cls: None = ...,
+ these: Optional[Dict[str, Any]] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ auto_detect: bool = ...,
+ collect_by_mro: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+) -> Callable[[_C], _C]: ...
+@overload
+@__dataclass_transform__(field_descriptors=(attrib, field))
+def define(
+ maybe_cls: _C,
+ *,
+ these: Optional[Dict[str, Any]] = ...,
+ repr: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+) -> _C: ...
+@overload
+@__dataclass_transform__(field_descriptors=(attrib, field))
+def define(
+ maybe_cls: None = ...,
+ *,
+ these: Optional[Dict[str, Any]] = ...,
+ repr: bool = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[bool] = ...,
+ order: Optional[bool] = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: Optional[bool] = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+) -> Callable[[_C], _C]: ...
+
+mutable = define
+frozen = define # they differ only in their defaults
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+class _Fields(Tuple[Attribute[Any], ...]):
+ def __getattr__(self, name: str) -> Attribute[Any]: ...
+
+def fields(cls: type) -> _Fields: ...
+def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ...
+def validate(inst: Any) -> None: ...
+def resolve_types(
+ cls: _C,
+ globalns: Optional[Dict[str, Any]] = ...,
+ localns: Optional[Dict[str, Any]] = ...,
+ attribs: Optional[List[Attribute[Any]]] = ...,
+) -> _C: ...
+
+# TODO: add support for returning a proper attrs class from the mypy plugin
+# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
+# [attr.ib()])` is valid
+def make_class(
+ name: str,
+ attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
+ bases: Tuple[type, ...] = ...,
+ repr_ns: Optional[str] = ...,
+ repr: bool = ...,
+ cmp: Optional[_EqOrderType] = ...,
+ hash: Optional[bool] = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: Optional[_EqOrderType] = ...,
+ order: Optional[_EqOrderType] = ...,
+ collect_by_mro: bool = ...,
+ on_setattr: Optional[_OnSetAttrArgType] = ...,
+ field_transformer: Optional[_FieldTransformer] = ...,
+) -> type: ...
+
+# _funcs --
+
+# TODO: add support for returning TypedDict from the mypy plugin
+# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
+# these:
+# https://github.com/python/mypy/issues/4236
+# https://github.com/python/typing/issues/253
+def asdict(
+ inst: Any,
+ recurse: bool = ...,
+ filter: Optional[_FilterType[Any]] = ...,
+ dict_factory: Type[Mapping[Any, Any]] = ...,
+ retain_collection_types: bool = ...,
+ value_serializer: Optional[Callable[[type, Attribute[Any], Any], Any]] = ...,
+) -> Dict[str, Any]: ...
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+def astuple(
+ inst: Any,
+ recurse: bool = ...,
+ filter: Optional[_FilterType[Any]] = ...,
+ tuple_factory: Type[Sequence[Any]] = ...,
+ retain_collection_types: bool = ...,
+) -> Tuple[Any, ...]: ...
+def has(cls: type) -> bool: ...
+def assoc(inst: _T, **changes: Any) -> _T: ...
+def evolve(inst: _T, **changes: Any) -> _T: ...
+
+# _config --
+
+def set_run_validators(run: bool) -> None: ...
+def get_run_validators() -> bool: ...
+
+# aliases --
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)
diff --git a/openpype/hosts/fusion/vendor/attr/_cmp.py b/openpype/hosts/fusion/vendor/attr/_cmp.py
new file mode 100644
index 0000000000..b747b603f1
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/_cmp.py
@@ -0,0 +1,152 @@
+from __future__ import absolute_import, division, print_function
+
+import functools
+
+from ._compat import new_class
+from ._make import _make_ne
+
+
+_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
+
+
+def cmp_using(
+ eq=None,
+ lt=None,
+ le=None,
+ gt=None,
+ ge=None,
+ require_same_type=True,
+ class_name="Comparable",
+):
+ """
+ Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and
+ ``cmp`` arguments to customize field comparison.
+
+ The resulting class will have a full set of ordering methods if
+    ``eq`` and at least one of ``{lt, le, gt, ge}`` are provided.
+
+ :param Optional[callable] eq: `callable` used to evaluate equality
+ of two objects.
+ :param Optional[callable] lt: `callable` used to evaluate whether
+ one object is less than another object.
+ :param Optional[callable] le: `callable` used to evaluate whether
+ one object is less than or equal to another object.
+ :param Optional[callable] gt: `callable` used to evaluate whether
+ one object is greater than another object.
+ :param Optional[callable] ge: `callable` used to evaluate whether
+ one object is greater than or equal to another object.
+
+ :param bool require_same_type: When `True`, equality and ordering methods
+ will return `NotImplemented` if objects are not of the same type.
+
+ :param Optional[str] class_name: Name of class. Defaults to 'Comparable'.
+
+ See `comparison` for more details.
+
+ .. versionadded:: 21.1.0
+ """
+
+ body = {
+ "__slots__": ["value"],
+ "__init__": _make_init(),
+ "_requirements": [],
+ "_is_comparable_to": _is_comparable_to,
+ }
+
+ # Add operations.
+ num_order_functions = 0
+ has_eq_function = False
+
+ if eq is not None:
+ has_eq_function = True
+ body["__eq__"] = _make_operator("eq", eq)
+ body["__ne__"] = _make_ne()
+
+ if lt is not None:
+ num_order_functions += 1
+ body["__lt__"] = _make_operator("lt", lt)
+
+ if le is not None:
+ num_order_functions += 1
+ body["__le__"] = _make_operator("le", le)
+
+ if gt is not None:
+ num_order_functions += 1
+ body["__gt__"] = _make_operator("gt", gt)
+
+ if ge is not None:
+ num_order_functions += 1
+ body["__ge__"] = _make_operator("ge", ge)
+
+ type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body))
+
+ # Add same type requirement.
+ if require_same_type:
+ type_._requirements.append(_check_same_type)
+
+ # Add total ordering if at least one operation was defined.
+ if 0 < num_order_functions < 4:
+ if not has_eq_function:
+ # functools.total_ordering requires __eq__ to be defined,
+ # so raise early error here to keep a nice stack.
+ raise ValueError(
+                "eq must be defined in order to complete ordering from "
+ "lt, le, gt, ge."
+ )
+ type_ = functools.total_ordering(type_)
+
+ return type_
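+
+# Editor's note: a hedged usage sketch of ``cmp_using``; ``Measurement`` is
+# illustrative, not from this codebase:
+#
+#     import attr
+#     from attr import cmp_using
+#
+#     @attr.s
+#     class Measurement(object):
+#         # Compare floats approximately instead of with plain ``==``.
+#         value = attr.ib(eq=cmp_using(eq=lambda a, b: abs(a - b) < 1e-9))
+#
+#     Measurement(0.1 + 0.2) == Measurement(0.3)  # -> True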
+
+
+def _make_init():
+ """
+ Create __init__ method.
+ """
+
+ def __init__(self, value):
+ """
+ Initialize object with *value*.
+ """
+ self.value = value
+
+ return __init__
+
+
+def _make_operator(name, func):
+ """
+ Create operator method.
+ """
+
+ def method(self, other):
+ if not self._is_comparable_to(other):
+ return NotImplemented
+
+ result = func(self.value, other.value)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return result
+
+ method.__name__ = "__%s__" % (name,)
+ method.__doc__ = "Return a %s b. Computed by attrs." % (
+ _operation_names[name],
+ )
+
+ return method
+
+
+def _is_comparable_to(self, other):
+ """
+ Check whether `other` is comparable to `self`.
+ """
+ for func in self._requirements:
+ if not func(self, other):
+ return False
+ return True
+
+
+def _check_same_type(self, other):
+ """
+ Return True if *self* and *other* are of the same type, False otherwise.
+ """
+ return other.value.__class__ is self.value.__class__
diff --git a/openpype/hosts/fusion/vendor/attr/_cmp.pyi b/openpype/hosts/fusion/vendor/attr/_cmp.pyi
new file mode 100644
index 0000000000..7093550f0f
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/_cmp.pyi
@@ -0,0 +1,14 @@
+from typing import Optional, Type
+
+from . import _CompareWithType
+
+
+def cmp_using(
+ eq: Optional[_CompareWithType],
+ lt: Optional[_CompareWithType],
+ le: Optional[_CompareWithType],
+ gt: Optional[_CompareWithType],
+ ge: Optional[_CompareWithType],
+ require_same_type: bool,
+ class_name: str,
+) -> Type: ...
diff --git a/openpype/hosts/fusion/vendor/attr/_compat.py b/openpype/hosts/fusion/vendor/attr/_compat.py
new file mode 100644
index 0000000000..6939f338da
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/_compat.py
@@ -0,0 +1,242 @@
+from __future__ import absolute_import, division, print_function
+
+import platform
+import sys
+import types
+import warnings
+
+
+PY2 = sys.version_info[0] == 2
+PYPY = platform.python_implementation() == "PyPy"
+
+
+if PYPY or sys.version_info[:2] >= (3, 6):
+ ordered_dict = dict
+else:
+ from collections import OrderedDict
+
+ ordered_dict = OrderedDict
+
+
+if PY2:
+ from collections import Mapping, Sequence
+
+ from UserDict import IterableUserDict
+
+ # We 'bundle' isclass instead of using inspect as importing inspect is
+ # fairly expensive (order of 10-15 ms for a modern machine in 2016)
+ def isclass(klass):
+ return isinstance(klass, (type, types.ClassType))
+
+ def new_class(name, bases, kwds, exec_body):
+ """
+ A minimal stub of types.new_class that we need for make_class.
+ """
+ ns = {}
+ exec_body(ns)
+
+ return type(name, bases, ns)
+
+ # TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
+ TYPE = "type"
+
+ def iteritems(d):
+ return d.iteritems()
+
+ # Python 2 is bereft of a read-only dict proxy, so we make one!
+ class ReadOnlyDict(IterableUserDict):
+ """
+ Best-effort read-only dict wrapper.
+ """
+
+ def __setitem__(self, key, val):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise TypeError(
+ "'mappingproxy' object does not support item assignment"
+ )
+
+ def update(self, _):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'update'"
+ )
+
+ def __delitem__(self, _):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise TypeError(
+ "'mappingproxy' object does not support item deletion"
+ )
+
+ def clear(self):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'clear'"
+ )
+
+ def pop(self, key, default=None):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'pop'"
+ )
+
+ def popitem(self):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'popitem'"
+ )
+
+ def setdefault(self, key, default=None):
+ # We gently pretend we're a Python 3 mappingproxy.
+ raise AttributeError(
+ "'mappingproxy' object has no attribute 'setdefault'"
+ )
+
+ def __repr__(self):
+ # Override to be identical to the Python 3 version.
+ return "mappingproxy(" + repr(self.data) + ")"
+
+ def metadata_proxy(d):
+ res = ReadOnlyDict()
+ res.data.update(d) # We blocked update, so we have to do it like this.
+ return res
+
+ def just_warn(*args, **kw): # pragma: no cover
+ """
+ We only warn on Python 3 because we are not aware of any concrete
+ consequences of not setting the cell on Python 2.
+ """
+
+
+else: # Python 3 and later.
+ from collections.abc import Mapping, Sequence # noqa
+
+ def just_warn(*args, **kw):
+ """
+ We only warn on Python 3 because we are not aware of any concrete
+ consequences of not setting the cell on Python 2.
+ """
+ warnings.warn(
+ "Running interpreter doesn't sufficiently support code object "
+ "introspection. Some features like bare super() or accessing "
+ "__class__ will not work with slotted classes.",
+ RuntimeWarning,
+ stacklevel=2,
+ )
+
+ def isclass(klass):
+ return isinstance(klass, type)
+
+ TYPE = "class"
+
+ def iteritems(d):
+ return d.items()
+
+ new_class = types.new_class
+
+ def metadata_proxy(d):
+ return types.MappingProxyType(dict(d))
+
+
+def make_set_closure_cell():
+ """Return a function of two arguments (cell, value) which sets
+ the value stored in the closure cell `cell` to `value`.
+ """
+ # pypy makes this easy. (It also supports the logic below, but
+ # why not do the easy/fast thing?)
+ if PYPY:
+
+ def set_closure_cell(cell, value):
+ cell.__setstate__((value,))
+
+ return set_closure_cell
+
+ # Otherwise gotta do it the hard way.
+
+ # Create a function that will set its first cellvar to `value`.
+ def set_first_cellvar_to(value):
+ x = value
+ return
+
+ # This function will be eliminated as dead code, but
+ # not before its reference to `x` forces `x` to be
+ # represented as a closure cell rather than a local.
+ def force_x_to_be_a_cell(): # pragma: no cover
+ return x
+
+ try:
+ # Extract the code object and make sure our assumptions about
+ # the closure behavior are correct.
+ if PY2:
+ co = set_first_cellvar_to.func_code
+ else:
+ co = set_first_cellvar_to.__code__
+ if co.co_cellvars != ("x",) or co.co_freevars != ():
+ raise AssertionError # pragma: no cover
+
+ # Convert this code object to a code object that sets the
+ # function's first _freevar_ (not cellvar) to the argument.
+ if sys.version_info >= (3, 8):
+ # CPython 3.8+ has an incompatible CodeType signature
+ # (added a posonlyargcount argument) but also added
+ # CodeType.replace() to do this without counting parameters.
+ set_first_freevar_code = co.replace(
+ co_cellvars=co.co_freevars, co_freevars=co.co_cellvars
+ )
+ else:
+ args = [co.co_argcount]
+ if not PY2:
+ args.append(co.co_kwonlyargcount)
+ args.extend(
+ [
+ co.co_nlocals,
+ co.co_stacksize,
+ co.co_flags,
+ co.co_code,
+ co.co_consts,
+ co.co_names,
+ co.co_varnames,
+ co.co_filename,
+ co.co_name,
+ co.co_firstlineno,
+ co.co_lnotab,
+ # These two arguments are reversed:
+ co.co_cellvars,
+ co.co_freevars,
+ ]
+ )
+ set_first_freevar_code = types.CodeType(*args)
+
+ def set_closure_cell(cell, value):
+ # Create a function using the set_first_freevar_code,
+ # whose first closure cell is `cell`. Calling it will
+ # change the value of that cell.
+ setter = types.FunctionType(
+ set_first_freevar_code, {}, "setter", (), (cell,)
+ )
+ # And call it to set the cell.
+ setter(value)
+
+ # Make sure it works on this interpreter:
+ def make_func_with_cell():
+ x = None
+
+ def func():
+ return x # pragma: no cover
+
+ return func
+
+ if PY2:
+ cell = make_func_with_cell().func_closure[0]
+ else:
+ cell = make_func_with_cell().__closure__[0]
+ set_closure_cell(cell, 100)
+ if cell.cell_contents != 100:
+ raise AssertionError # pragma: no cover
+
+ except Exception:
+ return just_warn
+ else:
+ return set_closure_cell
+
+
+set_closure_cell = make_set_closure_cell()
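+
+# Editor's note: attrs uses ``set_closure_cell`` to rewrite the ``__class__``
+# cell of methods when it rebuilds slotted classes. A minimal sketch of the
+# primitive itself (illustrative):
+#
+#     def outer():
+#         x = 1
+#         def inner():
+#             return x
+#         return inner
+#
+#     fn = outer()
+#     set_closure_cell(fn.__closure__[0], 42)
+#     fn()  # -> 42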
diff --git a/openpype/hosts/fusion/vendor/attr/_config.py b/openpype/hosts/fusion/vendor/attr/_config.py
new file mode 100644
index 0000000000..8ec920962d
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/_config.py
@@ -0,0 +1,23 @@
+from __future__ import absolute_import, division, print_function
+
+
+__all__ = ["set_run_validators", "get_run_validators"]
+
+_run_validators = True
+
+
+def set_run_validators(run):
+ """
+ Set whether or not validators are run. By default, they are run.
+ """
+ if not isinstance(run, bool):
+ raise TypeError("'run' must be bool.")
+ global _run_validators
+ _run_validators = run
+
+
+def get_run_validators():
+ """
+ Return whether or not validators are run.
+ """
+ return _run_validators
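+
+# Editor's note: an illustrative sketch -- validators can be switched off
+# globally, e.g. when re-instantiating large amounts of already-validated
+# data, and should be restored afterwards:
+#
+#     import attr
+#
+#     attr.set_run_validators(False)
+#     try:
+#         pass  # construct many attrs instances without validation
+#     finally:
+#         attr.set_run_validators(True)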
diff --git a/openpype/hosts/fusion/vendor/attr/_funcs.py b/openpype/hosts/fusion/vendor/attr/_funcs.py
new file mode 100644
index 0000000000..fda508c5c4
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/_funcs.py
@@ -0,0 +1,395 @@
+from __future__ import absolute_import, division, print_function
+
+import copy
+
+from ._compat import iteritems
+from ._make import NOTHING, _obj_setattr, fields
+from .exceptions import AttrsAttributeNotFoundError
+
+
+def asdict(
+ inst,
+ recurse=True,
+ filter=None,
+ dict_factory=dict,
+ retain_collection_types=False,
+ value_serializer=None,
+):
+ """
+ Return the ``attrs`` attribute values of *inst* as a dict.
+
+ Optionally recurse into other ``attrs``-decorated classes.
+
+ :param inst: Instance of an ``attrs``-decorated class.
+ :param bool recurse: Recurse into classes that are also
+ ``attrs``-decorated.
+    :param callable filter: A callable whose return value determines whether
+        an
+ attribute or element is included (``True``) or dropped (``False``). Is
+ called with the `attr.Attribute` as the first argument and the
+ value as the second argument.
+ :param callable dict_factory: A callable to produce dictionaries from. For
+ example, to produce ordered dictionaries instead of normal Python
+ dictionaries, pass in ``collections.OrderedDict``.
+ :param bool retain_collection_types: Do not convert to ``list`` when
+ encountering an attribute whose type is ``tuple`` or ``set``. Only
+ meaningful if ``recurse`` is ``True``.
+ :param Optional[callable] value_serializer: A hook that is called for every
+ attribute or dict key/value. It receives the current instance, field
+ and value and must return the (updated) value. The hook is run *after*
+ the optional *filter* has been applied.
+
+ :rtype: return type of *dict_factory*
+
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 16.0.0 *dict_factory*
+ .. versionadded:: 16.1.0 *retain_collection_types*
+ .. versionadded:: 20.3.0 *value_serializer*
+ """
+ attrs = fields(inst.__class__)
+ rv = dict_factory()
+ for a in attrs:
+ v = getattr(inst, a.name)
+ if filter is not None and not filter(a, v):
+ continue
+
+ if value_serializer is not None:
+ v = value_serializer(inst, a, v)
+
+ if recurse is True:
+ if has(v.__class__):
+ rv[a.name] = asdict(
+ v,
+ True,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+ )
+ elif isinstance(v, (tuple, list, set, frozenset)):
+ cf = v.__class__ if retain_collection_types is True else list
+ rv[a.name] = cf(
+ [
+ _asdict_anything(
+ i,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+ )
+ for i in v
+ ]
+ )
+ elif isinstance(v, dict):
+ df = dict_factory
+ rv[a.name] = df(
+ (
+ _asdict_anything(
+ kk,
+ filter,
+ df,
+ retain_collection_types,
+ value_serializer,
+ ),
+ _asdict_anything(
+ vv,
+ filter,
+ df,
+ retain_collection_types,
+ value_serializer,
+ ),
+ )
+ for kk, vv in iteritems(v)
+ )
+ else:
+ rv[a.name] = v
+ else:
+ rv[a.name] = v
+ return rv
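+
+# Editor's note: a hedged example of the recursion described above
+# (``Child``/``Parent`` are illustrative):
+#
+#     import attr
+#
+#     @attr.s
+#     class Child(object):
+#         name = attr.ib()
+#
+#     @attr.s
+#     class Parent(object):
+#         child = attr.ib()
+#
+#     attr.asdict(Parent(Child("a")))  # -> {'child': {'name': 'a'}}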
+
+
+def _asdict_anything(
+ val,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+):
+ """
+ ``asdict`` only works on attrs instances, this works on anything.
+ """
+ if getattr(val.__class__, "__attrs_attrs__", None) is not None:
+ # Attrs class.
+ rv = asdict(
+ val,
+ True,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+ )
+ elif isinstance(val, (tuple, list, set, frozenset)):
+ cf = val.__class__ if retain_collection_types is True else list
+ rv = cf(
+ [
+ _asdict_anything(
+ i,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+ )
+ for i in val
+ ]
+ )
+ elif isinstance(val, dict):
+ df = dict_factory
+ rv = df(
+ (
+ _asdict_anything(
+ kk, filter, df, retain_collection_types, value_serializer
+ ),
+ _asdict_anything(
+ vv, filter, df, retain_collection_types, value_serializer
+ ),
+ )
+ for kk, vv in iteritems(val)
+ )
+ else:
+ rv = val
+ if value_serializer is not None:
+ rv = value_serializer(None, None, rv)
+
+ return rv
+
+
+def astuple(
+ inst,
+ recurse=True,
+ filter=None,
+ tuple_factory=tuple,
+ retain_collection_types=False,
+):
+ """
+ Return the ``attrs`` attribute values of *inst* as a tuple.
+
+ Optionally recurse into other ``attrs``-decorated classes.
+
+ :param inst: Instance of an ``attrs``-decorated class.
+ :param bool recurse: Recurse into classes that are also
+ ``attrs``-decorated.
+    :param callable filter: A callable whose return value determines whether
+        an
+ attribute or element is included (``True``) or dropped (``False``). Is
+ called with the `attr.Attribute` as the first argument and the
+ value as the second argument.
+ :param callable tuple_factory: A callable to produce tuples from. For
+ example, to produce lists instead of tuples.
+ :param bool retain_collection_types: Do not convert to ``list``
+        or ``dict`` when encountering an attribute whose type is
+ ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
+ ``True``.
+
+ :rtype: return type of *tuple_factory*
+
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 16.2.0
+ """
+ attrs = fields(inst.__class__)
+ rv = []
+ retain = retain_collection_types # Very long. :/
+ for a in attrs:
+ v = getattr(inst, a.name)
+ if filter is not None and not filter(a, v):
+ continue
+ if recurse is True:
+ if has(v.__class__):
+ rv.append(
+ astuple(
+ v,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ )
+ elif isinstance(v, (tuple, list, set, frozenset)):
+ cf = v.__class__ if retain is True else list
+ rv.append(
+ cf(
+ [
+ astuple(
+ j,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(j.__class__)
+ else j
+ for j in v
+ ]
+ )
+ )
+ elif isinstance(v, dict):
+ df = v.__class__ if retain is True else dict
+ rv.append(
+ df(
+ (
+ astuple(
+ kk,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(kk.__class__)
+ else kk,
+ astuple(
+ vv,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(vv.__class__)
+ else vv,
+ )
+ for kk, vv in iteritems(v)
+ )
+ )
+ else:
+ rv.append(v)
+ else:
+ rv.append(v)
+
+ return rv if tuple_factory is list else tuple_factory(rv)
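+
+# Editor's note: ``astuple`` mirrors ``asdict`` but produces positional
+# data; a short sketch (``Pair`` is illustrative):
+#
+#     import attr
+#
+#     @attr.s
+#     class Pair(object):
+#         x = attr.ib()
+#         y = attr.ib()
+#
+#     attr.astuple(Pair(1, 2))  # -> (1, 2)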
+
+
+def has(cls):
+ """
+ Check whether *cls* is a class with ``attrs`` attributes.
+
+ :param type cls: Class to introspect.
+ :raise TypeError: If *cls* is not a class.
+
+ :rtype: bool
+ """
+ return getattr(cls, "__attrs_attrs__", None) is not None
+
+
+def assoc(inst, **changes):
+ """
+ Copy *inst* and apply *changes*.
+
+ :param inst: Instance of a class with ``attrs`` attributes.
+ :param changes: Keyword changes in the new copy.
+
+ :return: A copy of inst with *changes* incorporated.
+
+ :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
+ be found on *cls*.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. deprecated:: 17.1.0
+ Use `evolve` instead.
+ """
+ import warnings
+
+ warnings.warn(
+ "assoc is deprecated and will be removed after 2018/01.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ new = copy.copy(inst)
+ attrs = fields(inst.__class__)
+ for k, v in iteritems(changes):
+ a = getattr(attrs, k, NOTHING)
+ if a is NOTHING:
+ raise AttrsAttributeNotFoundError(
+ "{k} is not an attrs attribute on {cl}.".format(
+ k=k, cl=new.__class__
+ )
+ )
+ _obj_setattr(new, k, v)
+ return new
+
+
+def evolve(inst, **changes):
+ """
+ Create a new instance, based on *inst* with *changes* applied.
+
+ :param inst: Instance of a class with ``attrs`` attributes.
+ :param changes: Keyword changes in the new copy.
+
+ :return: A copy of inst with *changes* incorporated.
+
+ :raise TypeError: If *attr_name* couldn't be found in the class
+ ``__init__``.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ .. versionadded:: 17.1.0
+ """
+ cls = inst.__class__
+ attrs = fields(cls)
+ for a in attrs:
+ if not a.init:
+ continue
+ attr_name = a.name # To deal with private attributes.
+ init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
+ if init_name not in changes:
+ changes[init_name] = getattr(inst, attr_name)
+
+ return cls(**changes)
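+
+# Editor's note: a short ``evolve`` sketch (``Config`` is illustrative).
+# Unlike the deprecated ``assoc`` above, ``evolve`` goes through
+# ``__init__``, so converters and validators run on the changed values:
+#
+#     import attr
+#
+#     @attr.s(frozen=True)
+#     class Config(object):
+#         host = attr.ib()
+#         port = attr.ib(default=80)
+#
+#     attr.evolve(Config("localhost"), port=8080)
+#     # -> Config(host='localhost', port=8080)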
+
+
+def resolve_types(cls, globalns=None, localns=None, attribs=None):
+ """
+ Resolve any strings and forward annotations in type annotations.
+
+ This is only required if you need concrete types in `Attribute`'s *type*
+ field. In other words, you don't need to resolve your types if you only
+ use them for static type checking.
+
+ With no arguments, names will be looked up in the module in which the class
+ was created. If this is not what you want, e.g. if the name only exists
+ inside a method, you may pass *globalns* or *localns* to specify other
+ dictionaries in which to look up these names. See the docs of
+ `typing.get_type_hints` for more details.
+
+ :param type cls: Class to resolve.
+ :param Optional[dict] globalns: Dictionary containing global variables.
+ :param Optional[dict] localns: Dictionary containing local variables.
+ :param Optional[list] attribs: List of attribs for the given class.
+ This is necessary when calling from inside a ``field_transformer``
+ since *cls* is not an ``attrs`` class yet.
+
+ :raise TypeError: If *cls* is not a class.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class and you didn't pass any attribs.
+ :raise NameError: If types cannot be resolved because of missing variables.
+
+ :returns: *cls* so you can use this function also as a class decorator.
+ Please note that you have to apply it **after** `attr.s`. That means
+ the decorator has to come in the line **before** `attr.s`.
+
+ .. versionadded:: 20.1.0
+ .. versionadded:: 21.1.0 *attribs*
+
+ """
+ try:
+ # Since calling get_type_hints is expensive we cache whether we've
+ # done it already.
+ cls.__attrs_types_resolved__
+ except AttributeError:
+ import typing
+
+ hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
+ for field in fields(cls) if attribs is None else attribs:
+ if field.name in hints:
+ # Since fields have been frozen we must work around it.
+ _obj_setattr(field, "type", hints[field.name])
+ cls.__attrs_types_resolved__ = True
+
+ # Return the class so you can use it as a decorator too.
+ return cls
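+
+# Editor's note: a hedged ``resolve_types`` sketch for string annotations
+# (Python 3.6+; ``Node`` is illustrative):
+#
+#     import attr
+#
+#     @attr.s(auto_attribs=True)
+#     class Node:
+#         parent: "Node" = None
+#
+#     attr.resolve_types(Node)
+#     attr.fields(Node).parent.type  # -> <class 'Node'>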
diff --git a/openpype/hosts/fusion/vendor/attr/_make.py b/openpype/hosts/fusion/vendor/attr/_make.py
new file mode 100644
index 0000000000..a1912b1233
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/_make.py
@@ -0,0 +1,3052 @@
+from __future__ import absolute_import, division, print_function
+
+import copy
+import inspect
+import linecache
+import sys
+import threading
+import uuid
+import warnings
+
+from operator import itemgetter
+
+from . import _config, setters
+from ._compat import (
+ PY2,
+ PYPY,
+ isclass,
+ iteritems,
+ metadata_proxy,
+ new_class,
+ ordered_dict,
+ set_closure_cell,
+)
+from .exceptions import (
+ DefaultAlreadySetError,
+ FrozenInstanceError,
+ NotAnAttrsClassError,
+ PythonTooOldError,
+ UnannotatedAttributeError,
+)
+
+
+if not PY2:
+ import typing
+
+
+# This is used at least twice, so cache it here.
+_obj_setattr = object.__setattr__
+_init_converter_pat = "__attr_converter_%s"
+_init_factory_pat = "__attr_factory_{}"
+_tuple_property_pat = (
+ " {attr_name} = _attrs_property(_attrs_itemgetter({index}))"
+)
+_classvar_prefixes = (
+ "typing.ClassVar",
+ "t.ClassVar",
+ "ClassVar",
+ "typing_extensions.ClassVar",
+)
+# we don't use a double-underscore prefix because that triggers
+# name mangling when trying to create a slot for the field
+# (when slots=True)
+_hash_cache_field = "_attrs_cached_hash"
+
+_empty_metadata_singleton = metadata_proxy({})
+
+# Unique object for unequivocal getattr() defaults.
+_sentinel = object()
+
+
+class _Nothing(object):
+ """
+ Sentinel class to indicate the lack of a value when ``None`` is ambiguous.
+
+ ``_Nothing`` is a singleton. There is only ever one of it.
+
+ .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
+ """
+
+ _singleton = None
+
+ def __new__(cls):
+ if _Nothing._singleton is None:
+ _Nothing._singleton = super(_Nothing, cls).__new__(cls)
+ return _Nothing._singleton
+
+ def __repr__(self):
+ return "NOTHING"
+
+ def __bool__(self):
+ return False
+
+ def __len__(self):
+ return 0 # __bool__ for Python 2
+
+
+NOTHING = _Nothing()
+"""
+Sentinel to indicate the lack of a value when ``None`` is ambiguous.
+"""
+
+
+class _CacheHashWrapper(int):
+ """
+ An integer subclass that pickles / copies as None
+
+ This is used for non-slots classes with ``cache_hash=True``, to avoid
+ serializing a potentially (even likely) invalid hash value. Since ``None``
+ is the default value for uncalculated hashes, whenever this is copied,
+ the copy's value for the hash should automatically reset.
+
+ See GH #613 for more details.
+ """
+
+ if PY2:
+ # For some reason `type(None)` isn't callable in Python 2, but we don't
+ # actually need a constructor for None objects, we just need any
+ # available function that returns None.
+ def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)):
+ return _none_constructor, _args
+
+ else:
+
+ def __reduce__(self, _none_constructor=type(None), _args=()):
+ return _none_constructor, _args
+
+
+def attrib(
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=None,
+ init=True,
+ metadata=None,
+ type=None,
+ converter=None,
+ factory=None,
+ kw_only=False,
+ eq=None,
+ order=None,
+ on_setattr=None,
+):
+ """
+ Create a new attribute on a class.
+
+ .. warning::
+
+ Does *not* do anything unless the class is also decorated with
+ `attr.s`!
+
+ :param default: A value that is used if an ``attrs``-generated ``__init__``
+ is used and no value is passed while instantiating or the attribute is
+ excluded using ``init=False``.
+
+ If the value is an instance of `Factory`, its callable will be
+ used to construct a new value (useful for mutable data types like lists
+ or dicts).
+
+ If a default is not set (or set manually to `attr.NOTHING`), a value
+ *must* be supplied when instantiating; otherwise a `TypeError`
+ will be raised.
+
+ The default can also be set using decorator notation as shown below.
+
+ :type default: Any value
+
+ :param callable factory: Syntactic sugar for
+ ``default=attr.Factory(factory)``.
+
+ :param validator: `callable` that is called by ``attrs``-generated
+ ``__init__`` methods after the instance has been initialized. They
+ receive the initialized instance, the `Attribute`, and the
+ passed value.
+
+ The return value is *not* inspected so the validator has to throw an
+ exception itself.
+
+ If a `list` is passed, its items are treated as validators and must
+ all pass.
+
+ Validators can be globally disabled and re-enabled using
+ `get_run_validators`.
+
+ The validator can also be set using decorator notation as shown below.
+
+ :type validator: `callable` or a `list` of `callable`\\ s.
+
+ :param repr: Include this attribute in the generated ``__repr__``
+ method. If ``True``, include the attribute; if ``False``, omit it. By
+ default, the built-in ``repr()`` function is used. To override how the
+ attribute value is formatted, pass a ``callable`` that takes a single
+ value and returns a string. Note that the resulting string is used
+ as-is, i.e. it will be used directly *instead* of calling ``repr()``
+ (the default).
+ :type repr: a `bool` or a `callable` to use a custom function.
+
+ :param eq: If ``True`` (default), include this attribute in the
+ generated ``__eq__`` and ``__ne__`` methods that check two instances
+ for equality. To override how the attribute value is compared,
+ pass a ``callable`` that takes a single value and returns the value
+ to be compared.
+ :type eq: a `bool` or a `callable`.
+
+    :param order: If ``True`` (default), include this attribute in the
+ generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods.
+ To override how the attribute value is ordered,
+ pass a ``callable`` that takes a single value and returns the value
+ to be ordered.
+ :type order: a `bool` or a `callable`.
+
+ :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the
+ same value. Must not be mixed with *eq* or *order*.
+ :type cmp: a `bool` or a `callable`.
+
+ :param Optional[bool] hash: Include this attribute in the generated
+ ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This
+        is the correct behavior according to the Python spec. Setting this value
+ to anything else than ``None`` is *discouraged*.
+ :param bool init: Include this attribute in the generated ``__init__``
+ method. It is possible to set this to ``False`` and set a default
+        value. In that case this attribute is unconditionally initialized
+ with the specified default value or factory.
+ :param callable converter: `callable` that is called by
+ ``attrs``-generated ``__init__`` methods to convert attribute's value
+ to the desired format. It is given the passed-in value, and the
+ returned value will be used as the new value of the attribute. The
+ value is converted before being passed to the validator, if any.
+ :param metadata: An arbitrary mapping, to be used by third-party
+ components. See `extending_metadata`.
+ :param type: The type of the attribute. In Python 3.6 or greater, the
+ preferred method to specify the type is using a variable annotation
+        (see `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_).
+ This argument is provided for backward compatibility.
+ Regardless of the approach used, the type will be stored on
+ ``Attribute.type``.
+
+ Please note that ``attrs`` doesn't do anything with this metadata by
+ itself. You can use it as part of your own code or for
+        `static type checking <types>`.
+ :param kw_only: Make this attribute keyword-only (Python 3+)
+ in the generated ``__init__`` (if ``init`` is ``False``, this
+ parameter is ignored).
+    :param on_setattr: Allows overwriting the *on_setattr* setting from
+ `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used.
+ Set to `attr.setters.NO_OP` to run **no** `setattr` hooks for this
+ attribute -- regardless of the setting in `attr.s`.
+ :type on_setattr: `callable`, or a list of callables, or `None`, or
+ `attr.setters.NO_OP`
+
+ .. versionadded:: 15.2.0 *convert*
+ .. versionadded:: 16.3.0 *metadata*
+ .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
+ .. versionchanged:: 17.1.0
+ *hash* is ``None`` and therefore mirrors *eq* by default.
+ .. versionadded:: 17.3.0 *type*
+ .. deprecated:: 17.4.0 *convert*
+ .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated
+ *convert* to achieve consistency with other noun-based arguments.
+ .. versionadded:: 18.1.0
+ ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
+ .. versionadded:: 18.2.0 *kw_only*
+ .. versionchanged:: 19.2.0 *convert* keyword argument removed.
+ .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
+ .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+ .. versionadded:: 19.2.0 *eq* and *order*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
+ .. versionchanged:: 21.1.0
+ *eq*, *order*, and *cmp* also accept a custom callable
+ .. versionchanged:: 21.1.0 *cmp* undeprecated
+ """
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
+ cmp, eq, order, True
+ )
+
+ if hash is not None and hash is not True and hash is not False:
+ raise TypeError(
+ "Invalid value for hash. Must be True, False, or None."
+ )
+
+ if factory is not None:
+ if default is not NOTHING:
+ raise ValueError(
+ "The `default` and `factory` arguments are mutually "
+ "exclusive."
+ )
+ if not callable(factory):
+ raise ValueError("The `factory` argument must be a callable.")
+ default = Factory(factory)
+
+ if metadata is None:
+ metadata = {}
+
+ # Apply syntactic sugar by auto-wrapping.
+ if isinstance(on_setattr, (list, tuple)):
+ on_setattr = setters.pipe(*on_setattr)
+
+ if validator and isinstance(validator, (list, tuple)):
+ validator = and_(*validator)
+
+ if converter and isinstance(converter, (list, tuple)):
+ converter = pipe(*converter)
+
+ return _CountingAttr(
+ default=default,
+ validator=validator,
+ repr=repr,
+ cmp=None,
+ hash=hash,
+ init=init,
+ converter=converter,
+ metadata=metadata,
+ type=type,
+ kw_only=kw_only,
+ eq=eq,
+ eq_key=eq_key,
+ order=order,
+ order_key=order_key,
+ on_setattr=on_setattr,
+ )
+
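+# A minimal usage sketch of the attribute definition machinery above
+# (editor's illustration, not part of the library; class and field names
+# are made up):
+#
+#   import attr
+#
+#   @attr.s
+#   class Point(object):
+#       x = attr.ib(default=0, converter=int)
+#       y = attr.ib(factory=list)  # sugar for default=attr.Factory(list)
+#
+#   p = Point("1")
+#   assert p.x == 1 and p.y == []  # converter ran before assignment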
+
+def _compile_and_eval(script, globs, locs=None, filename=""):
+ """
+ "Exec" the script with the given global (globs) and local (locs) variables.
+ """
+ bytecode = compile(script, filename, "exec")
+ eval(bytecode, globs, locs)
+
+
+def _make_method(name, script, filename, globs=None):
+ """
+ Create the method with the script given and return the method object.
+ """
+ locs = {}
+ if globs is None:
+ globs = {}
+
+ _compile_and_eval(script, globs, locs, filename)
+
+ # So that debuggers like PDB are able to step through the code,
+ # we add a fake linecache entry.
+ linecache.cache[filename] = (
+ len(script),
+ None,
+ script.splitlines(True),
+ filename,
+ )
+
+ return locs[name]
+
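+# Editor's sketch of how the helper above is used internally; the method
+# name, script, and filename here are hypothetical:
+#
+#   meth = _make_method(
+#       "f", "def f(self):\n    return 42", "<attrs example f>"
+#   )
+#   # `meth` is a plain function object; the fake linecache entry lets
+#   # debuggers display the generated source.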
+
+def _make_attr_tuple_class(cls_name, attr_names):
+ """
+ Create a tuple subclass to hold `Attribute`s for an `attrs` class.
+
+ The subclass is a bare tuple with properties for names.
+
+ class MyClassAttributes(tuple):
+ __slots__ = ()
+ x = property(itemgetter(0))
+ """
+ attr_class_name = "{}Attributes".format(cls_name)
+ attr_class_template = [
+ "class {}(tuple):".format(attr_class_name),
+ " __slots__ = ()",
+ ]
+ if attr_names:
+ for i, attr_name in enumerate(attr_names):
+ attr_class_template.append(
+ _tuple_property_pat.format(index=i, attr_name=attr_name)
+ )
+ else:
+ attr_class_template.append(" pass")
+ globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
+ _compile_and_eval("\n".join(attr_class_template), globs)
+ return globs[attr_class_name]
+
+
+# Tuple class for extracted attributes from a class definition.
+# `base_attrs` is a subset of `attrs`.
+_Attributes = _make_attr_tuple_class(
+ "_Attributes",
+ [
+ # all attributes to build dunder methods for
+ "attrs",
+ # attributes that have been inherited
+ "base_attrs",
+ # map inherited attributes to their originating classes
+ "base_attrs_map",
+ ],
+)
+
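+# Editor's illustration of the generated tuple subclass (hypothetical
+# names; attr_x and attr_y stand for two Attribute instances):
+#
+#   PointAttributes = _make_attr_tuple_class("Point", ["x", "y"])
+#   pa = PointAttributes((attr_x, attr_y))
+#   assert pa.x is pa[0] and pa.y is pa[1]  # named access mirrors indexing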
+
+def _is_class_var(annot):
+ """
+ Check whether *annot* is a typing.ClassVar.
+
+ The string comparison hack is used to avoid evaluating all string
+ annotations, which would put attrs-based classes at a performance
+ disadvantage compared to plain old classes.
+ """
+ annot = str(annot)
+
+ # Annotation can be quoted.
+ if annot.startswith(("'", '"')) and annot.endswith(("'", '"')):
+ annot = annot[1:-1]
+
+ return annot.startswith(_classvar_prefixes)
+
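+# Editor's sketch, assuming _classvar_prefixes (defined earlier in this
+# module) contains "typing.ClassVar":
+#
+#   import typing
+#   assert _is_class_var(typing.ClassVar[int])       # real annotation
+#   assert _is_class_var("'typing.ClassVar[int]'")   # quoted string form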
+
+def _has_own_attribute(cls, attrib_name):
+ """
+ Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
+
+ Requires Python 3.
+ """
+ attr = getattr(cls, attrib_name, _sentinel)
+ if attr is _sentinel:
+ return False
+
+ for base_cls in cls.__mro__[1:]:
+ a = getattr(base_cls, attrib_name, None)
+ if attr is a:
+ return False
+
+ return True
+
+
+def _get_annotations(cls):
+ """
+ Get annotations for *cls*.
+ """
+ if _has_own_attribute(cls, "__annotations__"):
+ return cls.__annotations__
+
+ return {}
+
+
+def _counter_getter(e):
+ """
+ Key function for sorting to avoid re-creating a lambda for every class.
+ """
+ return e[1].counter
+
+
+def _collect_base_attrs(cls, taken_attr_names):
+ """
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+ """
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in reversed(cls.__mro__[1:-1]):
+ for a in getattr(base_cls, "__attrs_attrs__", []):
+ if a.inherited or a.name in taken_attr_names:
+ continue
+
+ a = a.evolve(inherited=True)
+ base_attrs.append(a)
+ base_attr_map[a.name] = base_cls
+
+ # For each name, only keep the freshest definition, i.e. the one furthest
+ # toward the back. base_attr_map is fine because it gets overwritten with
+ # every new instance.
+ filtered = []
+ seen = set()
+ for a in reversed(base_attrs):
+ if a.name in seen:
+ continue
+ filtered.insert(0, a)
+ seen.add(a.name)
+
+ return filtered, base_attr_map
+
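+# Editor's sketch of the collection logic above with a hypothetical
+# hierarchy:
+#
+#   @attr.s
+#   class Base(object):
+#       x = attr.ib()
+#
+#   @attr.s
+#   class Child(Base):
+#       y = attr.ib()
+#
+#   # _collect_base_attrs(Child, {"y"}) yields Base's `x` re-created with
+#   # inherited=True, and base_attr_map == {"x": Base}.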
+
+def _collect_base_attrs_broken(cls, taken_attr_names):
+ """
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+
+ N.B. *taken_attr_names* will be mutated.
+
+ Adhere to the old incorrect behavior.
+
+ Notably it collects from the front and considers inherited attributes,
+ which leads to the buggy behavior reported in #428.
+ """
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in cls.__mro__[1:-1]:
+ for a in getattr(base_cls, "__attrs_attrs__", []):
+ if a.name in taken_attr_names:
+ continue
+
+ a = a.evolve(inherited=True)
+ taken_attr_names.add(a.name)
+ base_attrs.append(a)
+ base_attr_map[a.name] = base_cls
+
+ return base_attrs, base_attr_map
+
+
+def _transform_attrs(
+ cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer
+):
+ """
+ Transform all `_CountingAttr`s on a class into `Attribute`s.
+
+ If *these* is passed, use that and don't look for them on the class.
+
+ If *collect_by_mro* is True, collect them in the correct MRO order;
+ otherwise use the old -- incorrect -- order. See #428.
+
+ Return an `_Attributes`.
+ """
+ cd = cls.__dict__
+ anns = _get_annotations(cls)
+
+ if these is not None:
+ ca_list = [(name, ca) for name, ca in iteritems(these)]
+
+ if not isinstance(these, ordered_dict):
+ ca_list.sort(key=_counter_getter)
+ elif auto_attribs is True:
+ ca_names = {
+ name
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ }
+ ca_list = []
+ annot_names = set()
+ for attr_name, type in anns.items():
+ if _is_class_var(type):
+ continue
+ annot_names.add(attr_name)
+ a = cd.get(attr_name, NOTHING)
+
+ if not isinstance(a, _CountingAttr):
+ if a is NOTHING:
+ a = attrib()
+ else:
+ a = attrib(default=a)
+ ca_list.append((attr_name, a))
+
+ unannotated = ca_names - annot_names
+ if len(unannotated) > 0:
+ raise UnannotatedAttributeError(
+ "The following `attr.ib`s lack a type annotation: "
+ + ", ".join(
+ sorted(unannotated, key=lambda n: cd.get(n).counter)
+ )
+ + "."
+ )
+ else:
+ ca_list = sorted(
+ (
+ (name, attr)
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ ),
+ key=lambda e: e[1].counter,
+ )
+
+ own_attrs = [
+ Attribute.from_counting_attr(
+ name=attr_name, ca=ca, type=anns.get(attr_name)
+ )
+ for attr_name, ca in ca_list
+ ]
+
+ if collect_by_mro:
+ base_attrs, base_attr_map = _collect_base_attrs(
+ cls, {a.name for a in own_attrs}
+ )
+ else:
+ base_attrs, base_attr_map = _collect_base_attrs_broken(
+ cls, {a.name for a in own_attrs}
+ )
+
+ attr_names = [a.name for a in base_attrs + own_attrs]
+
+ AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
+
+ if kw_only:
+ own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
+ base_attrs = [a.evolve(kw_only=True) for a in base_attrs]
+
+ attrs = AttrsClass(base_attrs + own_attrs)
+
+ # Mandatory vs non-mandatory attr order only matters when they are part of
+ # the __init__ signature and when they aren't kw_only (which are moved to
+ # the end and can be mandatory or non-mandatory in any order, as they will
+ # be specified as keyword args anyway). Check the order of those attrs:
+ had_default = False
+ for a in (a for a in attrs if a.init is not False and a.kw_only is False):
+ if had_default is True and a.default is NOTHING:
+ raise ValueError(
+ "No mandatory attributes allowed after an attribute with a "
+ "default value or factory. Attribute in question: %r" % (a,)
+ )
+
+ if had_default is False and a.default is not NOTHING:
+ had_default = True
+
+ if field_transformer is not None:
+ attrs = field_transformer(cls, attrs)
+ return _Attributes((attrs, base_attrs, base_attr_map))
+
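+# Editor's illustration of the default-ordering check at the end of the
+# function above (hypothetical class):
+#
+#   @attr.s
+#   class Broken(object):
+#       x = attr.ib(default=1)
+#       y = attr.ib()  # ValueError: no mandatory attributes allowed after
+#                      # an attribute with a default value or factory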
+
+if PYPY:
+
+ def _frozen_setattrs(self, name, value):
+ """
+ Attached to frozen classes as __setattr__.
+ """
+ if isinstance(self, BaseException) and name in (
+ "__cause__",
+ "__context__",
+ ):
+ BaseException.__setattr__(self, name, value)
+ return
+
+ raise FrozenInstanceError()
+
+
+else:
+
+ def _frozen_setattrs(self, name, value):
+ """
+ Attached to frozen classes as __setattr__.
+ """
+ raise FrozenInstanceError()
+
+
+def _frozen_delattrs(self, name):
+ """
+ Attached to frozen classes as __delattr__.
+ """
+ raise FrozenInstanceError()
+
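+# Editor's illustration of the frozen hooks above:
+#
+#   @attr.s(frozen=True)
+#   class Frozen(object):
+#       x = attr.ib()
+#
+#   f = Frozen(1)
+#   f.x = 2   # raises attr.exceptions.FrozenInstanceError (_frozen_setattrs)
+#   del f.x   # raises attr.exceptions.FrozenInstanceError (_frozen_delattrs)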
+
+class _ClassBuilder(object):
+ """
+ Iteratively build *one* class.
+ """
+
+ __slots__ = (
+ "_attr_names",
+ "_attrs",
+ "_base_attr_map",
+ "_base_names",
+ "_cache_hash",
+ "_cls",
+ "_cls_dict",
+ "_delete_attribs",
+ "_frozen",
+ "_has_pre_init",
+ "_has_post_init",
+ "_is_exc",
+ "_on_setattr",
+ "_slots",
+ "_weakref_slot",
+ "_has_own_setattr",
+ "_has_custom_setattr",
+ )
+
+ def __init__(
+ self,
+ cls,
+ these,
+ slots,
+ frozen,
+ weakref_slot,
+ getstate_setstate,
+ auto_attribs,
+ kw_only,
+ cache_hash,
+ is_exc,
+ collect_by_mro,
+ on_setattr,
+ has_custom_setattr,
+ field_transformer,
+ ):
+ attrs, base_attrs, base_map = _transform_attrs(
+ cls,
+ these,
+ auto_attribs,
+ kw_only,
+ collect_by_mro,
+ field_transformer,
+ )
+
+ self._cls = cls
+ self._cls_dict = dict(cls.__dict__) if slots else {}
+ self._attrs = attrs
+ self._base_names = set(a.name for a in base_attrs)
+ self._base_attr_map = base_map
+ self._attr_names = tuple(a.name for a in attrs)
+ self._slots = slots
+ self._frozen = frozen
+ self._weakref_slot = weakref_slot
+ self._cache_hash = cache_hash
+ self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
+ self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
+ self._delete_attribs = not bool(these)
+ self._is_exc = is_exc
+ self._on_setattr = on_setattr
+
+ self._has_custom_setattr = has_custom_setattr
+ self._has_own_setattr = False
+
+ self._cls_dict["__attrs_attrs__"] = self._attrs
+
+ if frozen:
+ self._cls_dict["__setattr__"] = _frozen_setattrs
+ self._cls_dict["__delattr__"] = _frozen_delattrs
+
+ self._has_own_setattr = True
+
+ if getstate_setstate:
+ (
+ self._cls_dict["__getstate__"],
+ self._cls_dict["__setstate__"],
+ ) = self._make_getstate_setstate()
+
+ def __repr__(self):
+ return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__)
+
+ def build_class(self):
+ """
+ Finalize class based on the accumulated configuration.
+
+ Builder cannot be used after calling this method.
+ """
+ if self._slots is True:
+ return self._create_slots_class()
+ else:
+ return self._patch_original_class()
+
+ def _patch_original_class(self):
+ """
+ Apply accumulated methods and return the class.
+ """
+ cls = self._cls
+ base_names = self._base_names
+
+ # Clean class of attribute definitions (`attr.ib()`s).
+ if self._delete_attribs:
+ for name in self._attr_names:
+ if (
+ name not in base_names
+ and getattr(cls, name, _sentinel) is not _sentinel
+ ):
+ try:
+ delattr(cls, name)
+ except AttributeError:
+ # This can happen if a base class defines a class
+ # variable and we want to set an attribute with the
+ # same name by using only a type annotation.
+ pass
+
+ # Attach our dunder methods.
+ for name, value in self._cls_dict.items():
+ setattr(cls, name, value)
+
+ # If we've inherited an attrs __setattr__ and don't write our own,
+ # reset it to object's.
+ if not self._has_own_setattr and getattr(
+ cls, "__attrs_own_setattr__", False
+ ):
+ cls.__attrs_own_setattr__ = False
+
+ if not self._has_custom_setattr:
+ cls.__setattr__ = object.__setattr__
+
+ return cls
+
+ def _create_slots_class(self):
+ """
+ Build and return a new class with a `__slots__` attribute.
+ """
+ cd = {
+ k: v
+ for k, v in iteritems(self._cls_dict)
+ if k not in tuple(self._attr_names) + ("__dict__", "__weakref__")
+ }
+
+ # If our class doesn't have its own implementation of __setattr__
+ # (either from the user or by us), check the bases: if one of them has
+ # an attrs-made __setattr__, it needs to be reset. We don't walk the
+ # MRO because we only care about our immediate base classes.
+ # XXX: This can be confused by subclassing a slotted attrs class with
+ # XXX: a non-attrs class and subclass the resulting class with an attrs
+ # XXX: class. See `test_slotted_confused` for details. For now that's
+ # XXX: OK with us.
+ if not self._has_own_setattr:
+ cd["__attrs_own_setattr__"] = False
+
+ if not self._has_custom_setattr:
+ for base_cls in self._cls.__bases__:
+ if base_cls.__dict__.get("__attrs_own_setattr__", False):
+ cd["__setattr__"] = object.__setattr__
+ break
+
+ # Traverse the MRO to collect existing slots
+ # and check for an existing __weakref__.
+ existing_slots = dict()
+ weakref_inherited = False
+ for base_cls in self._cls.__mro__[1:-1]:
+ if base_cls.__dict__.get("__weakref__", None) is not None:
+ weakref_inherited = True
+ existing_slots.update(
+ {
+ name: getattr(base_cls, name)
+ for name in getattr(base_cls, "__slots__", [])
+ }
+ )
+
+ base_names = set(self._base_names)
+
+ names = self._attr_names
+ if (
+ self._weakref_slot
+ and "__weakref__" not in getattr(self._cls, "__slots__", ())
+ and "__weakref__" not in names
+ and not weakref_inherited
+ ):
+ names += ("__weakref__",)
+
+ # We only add the names of attributes that aren't inherited.
+ # Setting __slots__ to inherited attributes wastes memory.
+ slot_names = [name for name in names if name not in base_names]
+ # There are slots for attributes from current class
+ # that are defined in parent classes.
+ # As their descriptors may be overridden by a child class,
+ # we collect them here and update the class dict.
+ reused_slots = {
+ slot: slot_descriptor
+ for slot, slot_descriptor in iteritems(existing_slots)
+ if slot in slot_names
+ }
+ slot_names = [name for name in slot_names if name not in reused_slots]
+ cd.update(reused_slots)
+ if self._cache_hash:
+ slot_names.append(_hash_cache_field)
+ cd["__slots__"] = tuple(slot_names)
+
+ qualname = getattr(self._cls, "__qualname__", None)
+ if qualname is not None:
+ cd["__qualname__"] = qualname
+
+ # Create new class based on old class and our methods.
+ cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
+
+ # The following is a fix for
+ # https://github.com/python-attrs/attrs/issues/102. On Python 3,
+ # if a method mentions `__class__` or uses the no-arg super(), the
+ # compiler will bake a reference to the class in the method itself
+ # as `method.__closure__`. Since we replace the class with a
+ # clone, we rewrite these references so it keeps working.
+ for item in cls.__dict__.values():
+ if isinstance(item, (classmethod, staticmethod)):
+ # Class- and staticmethods hide their functions inside.
+ # These might need to be rewritten as well.
+ closure_cells = getattr(item.__func__, "__closure__", None)
+ elif isinstance(item, property):
+ # Workaround for property `super()` shortcut (PY3-only).
+ # There is no universal way for other descriptors.
+ closure_cells = getattr(item.fget, "__closure__", None)
+ else:
+ closure_cells = getattr(item, "__closure__", None)
+
+ if not closure_cells: # Catch None or the empty list.
+ continue
+ for cell in closure_cells:
+ try:
+ match = cell.cell_contents is self._cls
+ except ValueError: # ValueError: Cell is empty
+ pass
+ else:
+ if match:
+ set_closure_cell(cell, cls)
+
+ return cls
+
+ def add_repr(self, ns):
+ self._cls_dict["__repr__"] = self._add_method_dunders(
+ _make_repr(self._attrs, ns=ns)
+ )
+ return self
+
+ def add_str(self):
+ repr = self._cls_dict.get("__repr__")
+ if repr is None:
+ raise ValueError(
+ "__str__ can only be generated if a __repr__ exists."
+ )
+
+ def __str__(self):
+ return self.__repr__()
+
+ self._cls_dict["__str__"] = self._add_method_dunders(__str__)
+ return self
+
+ def _make_getstate_setstate(self):
+ """
+ Create custom __setstate__ and __getstate__ methods.
+ """
+ # __weakref__ is not writable.
+ state_attr_names = tuple(
+ an for an in self._attr_names if an != "__weakref__"
+ )
+
+ def slots_getstate(self):
+ """
+ Automatically created by attrs.
+ """
+ return tuple(getattr(self, name) for name in state_attr_names)
+
+ hash_caching_enabled = self._cache_hash
+
+ def slots_setstate(self, state):
+ """
+ Automatically created by attrs.
+ """
+ __bound_setattr = _obj_setattr.__get__(self, Attribute)
+ for name, value in zip(state_attr_names, state):
+ __bound_setattr(name, value)
+
+ # The hash code cache is not included when the object is
+ # serialized, but it still needs to be initialized to None to
+ # indicate that the first call to __hash__ should be a cache
+ # miss.
+ if hash_caching_enabled:
+ __bound_setattr(_hash_cache_field, None)
+
+ return slots_getstate, slots_setstate
+
+ def make_unhashable(self):
+ self._cls_dict["__hash__"] = None
+ return self
+
+ def add_hash(self):
+ self._cls_dict["__hash__"] = self._add_method_dunders(
+ _make_hash(
+ self._cls,
+ self._attrs,
+ frozen=self._frozen,
+ cache_hash=self._cache_hash,
+ )
+ )
+
+ return self
+
+ def add_init(self):
+ self._cls_dict["__init__"] = self._add_method_dunders(
+ _make_init(
+ self._cls,
+ self._attrs,
+ self._has_pre_init,
+ self._has_post_init,
+ self._frozen,
+ self._slots,
+ self._cache_hash,
+ self._base_attr_map,
+ self._is_exc,
+ self._on_setattr is not None
+ and self._on_setattr is not setters.NO_OP,
+ attrs_init=False,
+ )
+ )
+
+ return self
+
+ def add_attrs_init(self):
+ self._cls_dict["__attrs_init__"] = self._add_method_dunders(
+ _make_init(
+ self._cls,
+ self._attrs,
+ self._has_pre_init,
+ self._has_post_init,
+ self._frozen,
+ self._slots,
+ self._cache_hash,
+ self._base_attr_map,
+ self._is_exc,
+ self._on_setattr is not None
+ and self._on_setattr is not setters.NO_OP,
+ attrs_init=True,
+ )
+ )
+
+ return self
+
+ def add_eq(self):
+ cd = self._cls_dict
+
+ cd["__eq__"] = self._add_method_dunders(
+ _make_eq(self._cls, self._attrs)
+ )
+ cd["__ne__"] = self._add_method_dunders(_make_ne())
+
+ return self
+
+ def add_order(self):
+ cd = self._cls_dict
+
+ cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
+ self._add_method_dunders(meth)
+ for meth in _make_order(self._cls, self._attrs)
+ )
+
+ return self
+
+ def add_setattr(self):
+ if self._frozen:
+ return self
+
+ sa_attrs = {}
+ for a in self._attrs:
+ on_setattr = a.on_setattr or self._on_setattr
+ if on_setattr and on_setattr is not setters.NO_OP:
+ sa_attrs[a.name] = a, on_setattr
+
+ if not sa_attrs:
+ return self
+
+ if self._has_custom_setattr:
+ # We need to write a __setattr__ but there already is one!
+ raise ValueError(
+ "Can't combine custom __setattr__ with on_setattr hooks."
+ )
+
+ # docstring comes from _add_method_dunders
+ def __setattr__(self, name, val):
+ try:
+ a, hook = sa_attrs[name]
+ except KeyError:
+ nval = val
+ else:
+ nval = hook(self, a, val)
+
+ _obj_setattr(self, name, nval)
+
+ self._cls_dict["__attrs_own_setattr__"] = True
+ self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
+ self._has_own_setattr = True
+
+ return self
+
+ def _add_method_dunders(self, method):
+ """
+ Add __module__ and __qualname__ to a *method* if possible.
+ """
+ try:
+ method.__module__ = self._cls.__module__
+ except AttributeError:
+ pass
+
+ try:
+ method.__qualname__ = ".".join(
+ (self._cls.__qualname__, method.__name__)
+ )
+ except AttributeError:
+ pass
+
+ try:
+ method.__doc__ = "Method generated by attrs for class %s." % (
+ self._cls.__qualname__,
+ )
+ except AttributeError:
+ pass
+
+ return method
+
+
+_CMP_DEPRECATION = (
+ "The usage of `cmp` is deprecated and will be removed on or after "
+ "2021-06-01. Please use `eq` and `order` instead."
+)
+
+
+def _determine_attrs_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+ raise ValueError("Don't mix `cmp` with `eq` and `order`.")
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ return cmp, cmp
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq = default_eq
+
+ if order is None:
+ order = eq
+
+ if eq is False and order is True:
+ raise ValueError("`order` can only be True if `eq` is True too.")
+
+ return eq, order
+
+
+def _determine_attrib_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+ raise ValueError("Don't mix `cmp` with `eq` and `order`.")
+
+ def decide_callable_or_boolean(value):
+ """
+ Decide whether a key function is used.
+ """
+ if callable(value):
+ value, key = True, value
+ else:
+ key = None
+ return value, key
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ cmp, cmp_key = decide_callable_or_boolean(cmp)
+ return cmp, cmp_key, cmp, cmp_key
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq, eq_key = default_eq, None
+ else:
+ eq, eq_key = decide_callable_or_boolean(eq)
+
+ if order is None:
+ order, order_key = eq, eq_key
+ else:
+ order, order_key = decide_callable_or_boolean(order)
+
+ if eq is False and order is True:
+ raise ValueError("`order` can only be True if `eq` is True too.")
+
+ return eq, eq_key, order, order_key
+
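+# Editor's sketch of the derivation rules implemented above:
+#
+#   _determine_attrib_eq_order(None, None, None, True)
+#   # -> (True, None, True, None): eq takes the default, order mirrors eq
+#
+#   _determine_attrib_eq_order(None, str.lower, None, True)
+#   # -> (True, str.lower, True, str.lower): a callable enables the method
+#   #    and doubles as the key function; order follows eq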
+
+def _determine_whether_to_implement(
+ cls, flag, auto_detect, dunders, default=True
+):
+ """
+ Check whether we should implement a set of methods for *cls*.
+
+ *flag* is the argument passed into @attr.s like 'init', *auto_detect* is
+ the same as passed into @attr.s, and *dunders* is a tuple of attribute
+ names whose presence signals that the user has implemented the method
+ themselves.
+
+ Return *default* if no reason for or against is found.
+
+ auto_detect must be False on Python 2.
+ """
+ if flag is True or flag is False:
+ return flag
+
+ if flag is None and auto_detect is False:
+ return default
+
+ # Logically, flag is None and auto_detect is True here.
+ for dunder in dunders:
+ if _has_own_attribute(cls, dunder):
+ return False
+
+ return default
+
+
+def attrs(
+ maybe_cls=None,
+ these=None,
+ repr_ns=None,
+ repr=None,
+ cmp=None,
+ hash=None,
+ init=None,
+ slots=False,
+ frozen=False,
+ weakref_slot=True,
+ str=False,
+ auto_attribs=False,
+ kw_only=False,
+ cache_hash=False,
+ auto_exc=False,
+ eq=None,
+ order=None,
+ auto_detect=False,
+ collect_by_mro=False,
+ getstate_setstate=None,
+ on_setattr=None,
+ field_transformer=None,
+):
+ r"""
+ A class decorator that adds `dunder
+ <https://nedbatchelder.com/blog/200605/dunder.html>`_\ -methods according
+ to the specified attributes using `attr.ib` or the *these* argument.
+
+ :param these: A dictionary of name to `attr.ib` mappings. This is
+ useful to avoid the definition of your attributes within the class body
+ because you can't (e.g. if you want to add ``__repr__`` methods to
+ Django models) or don't want to.
+
+ If *these* is not ``None``, ``attrs`` will *not* search the class body
+ for attributes and will *not* remove any attributes from it.
+
+ If *these* is an ordered dict (`dict` on Python 3.6+,
+ `collections.OrderedDict` otherwise), the order is deduced from
+ the order of the attributes inside *these*. Otherwise the order
+ of the definition of the attributes is used.
+
+ :type these: `dict` of `str` to `attr.ib`
+
+ :param str repr_ns: When using nested classes, there's no way in Python 2
+ to automatically detect their enclosing namespace. Therefore it's
+ possible to set the namespace explicitly for a more meaningful ``repr``
+ output.
+ :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*,
+ *order*, and *hash* arguments explicitly, assume they are set to
+ ``True`` **unless any** of the involved methods for one of the
+ arguments is implemented in the *current* class (i.e. it is *not*
+ inherited from some base class).
+
+ So for example by implementing ``__eq__`` on a class yourself,
+ ``attrs`` will deduce ``eq=False`` and will create *neither*
+ ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible
+ ``__ne__`` by default, so it *should* be enough to only implement
+ ``__eq__`` in most cases).
+
+ .. warning::
+
+ If you prevent ``attrs`` from creating the ordering methods for you
+ (``order=False``, e.g. by implementing ``__le__``), it becomes
+ *your* responsibility to make sure its ordering is sound. The best
+ way is to use the `functools.total_ordering` decorator.
+
+
+ Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*,
+ *cmp*, or *hash* overrides whatever *auto_detect* would determine.
+
+ *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises
+ a `PythonTooOldError`.
+
+ :param bool repr: Create a ``__repr__`` method with a human-readable
+ representation of ``attrs`` attributes.
+ :param bool str: Create a ``__str__`` method that is identical to
+ ``__repr__``. This is usually not necessary except for
+ `Exception`\ s.
+ :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__``
+ and ``__ne__`` methods that check two instances for equality.
+
+ They compare the instances as if they were tuples of their ``attrs``
+ attributes if and only if the types of both classes are *identical*!
+ :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``,
+ ``__gt__``, and ``__ge__`` methods that behave like *eq* above and
+ allow instances to be ordered. If ``None`` (default) mirror value of
+ *eq*.
+ :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq*
+ and *order* to the same value. Must not be mixed with *eq* or *order*.
+ :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method
+ is generated according to how *eq* and *frozen* are set.
+
+ 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you.
+ 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to
+ None, marking it unhashable (which it is).
+ 3. If *eq* is False, ``__hash__`` will be left untouched meaning the
+ ``__hash__`` method of the base class will be used (if base class is
+ ``object``, this means it will fall back to id-based hashing).
+
+ Although not recommended, you can decide for yourself and force
+ ``attrs`` to create one (e.g. if the class is immutable even though you
+ didn't freeze it programmatically) by passing ``True``, or suppress it
+ by passing ``False``. Both of these cases are rather special and should
+ be used carefully.
+
+ See our documentation on `hashing`, Python's documentation on
+ `object.__hash__`, and the `GitHub issue that led to the default \
+ behavior <https://github.com/python-attrs/attrs/issues/136>`_ for more
+ details.
+ :param bool init: Create a ``__init__`` method that initializes the
+ ``attrs`` attributes. Leading underscores are stripped for the argument
+ name. If a ``__attrs_pre_init__`` method exists on the class, it will
+ be called before the class is initialized. If a ``__attrs_post_init__``
+ method exists on the class, it will be called after the class is fully
+ initialized.
+
+ If ``init`` is ``False``, an ``__attrs_init__`` method will be
+ injected instead. This allows you to define a custom ``__init__``
+ method that can do pre-init work such as ``super().__init__()``,
+ and then call ``__attrs_init__()`` and ``__attrs_post_init__()``.
+ :param bool slots: Create a `slotted class <slotted classes>` that's more
+ memory-efficient. Slotted classes are generally superior to the default
+ dict classes, but have some gotchas you should know about, so we
+ encourage you to read the `glossary entry <slotted classes>`.
+ :param bool frozen: Make instances immutable after initialization. If
+ someone attempts to modify a frozen instance,
+ `attr.exceptions.FrozenInstanceError` is raised.
+
+ .. note::
+
+ 1. This is achieved by installing a custom ``__setattr__`` method
+ on your class, so you can't implement your own.
+
+ 2. True immutability is impossible in Python.
+
+ 3. This *does* have a minor runtime performance `impact
+ <how-frozen>` when initializing new instances. In other words:
+ ``__init__`` is slightly slower with ``frozen=True``.
+
+ 4. If a class is frozen, you cannot modify ``self`` in
+ ``__attrs_post_init__`` or a self-written ``__init__``. You can
+ circumvent that limitation by using
+ ``object.__setattr__(self, "attribute_name", value)``.
+
+ 5. Subclasses of a frozen class are frozen too.
+
+ :param bool weakref_slot: Make instances weak-referenceable. This has no
+ effect unless ``slots`` is also enabled.
+ :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated
+ attributes (Python 3.6 and later only) from the class body.
+
+ In this case, you **must** annotate every field. If ``attrs``
+ encounters a field that is set to an `attr.ib` but lacks a type
+ annotation, an `attr.exceptions.UnannotatedAttributeError` is
+ raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't
+ want to set a type.
+
+ If you assign a value to those attributes (e.g. ``x: int = 42``), that
+ value becomes the default value like if it were passed using
+ ``attr.ib(default=42)``. Passing an instance of `Factory` also
+ works as expected in most cases (see warning below).
+
+ Attributes annotated as `typing.ClassVar`, and attributes that are
+ neither annotated nor set to an `attr.ib` are **ignored**.
+
+ .. warning::
+ For features that use the attribute name to create decorators (e.g.
+ `validators <examples_validators>`), you still *must* assign `attr.ib` to
+ them. Otherwise Python will either not find the name or try to use
+ the default value to call e.g. ``validator`` on it.
+
+ These errors can be quite confusing and are probably the most common
+ source of bug reports on our bug tracker.
+
+ .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/
+ :param bool kw_only: Make all attributes keyword-only (Python 3+)
+ in the generated ``__init__`` (if ``init`` is ``False``, this
+ parameter is ignored).
+ :param bool cache_hash: Ensure that the object's hash code is computed
+ only once and stored on the object. If this is set to ``True``,
+ hashing must be either explicitly or implicitly enabled for this
+ class. If the hash code is cached, avoid any reassignments of
+ fields involved in hash code computation or mutations of the objects
+ those fields point to after object creation. If such changes occur,
+ the behavior of the object's hash code is undefined.
+ :param bool auto_exc: If the class subclasses `BaseException`
+ (which implicitly includes any subclass of any exception), the
+ following happens to behave like a well-behaved Python exception
+ class:
+
+ - the values for *eq*, *order*, and *hash* are ignored and the
+ instances compare and hash by the instance's ids (N.B. ``attrs`` will
+ *not* remove existing implementations of ``__hash__`` or the equality
+ methods. It just won't add its own.),
+ - all attributes that are either passed into ``__init__`` or have a
+ default value are additionally available as a tuple in the ``args``
+ attribute,
+ - the value of *str* is ignored leaving ``__str__`` to base classes.
+ :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs``
+ collects attributes from base classes. The default behavior is
+ incorrect in certain cases of multiple inheritance. It should be on by
+ default but is kept off for backward compatibility.
+
+ See issue `#428 <https://github.com/python-attrs/attrs/issues/428>`_ for
+ more details.
+
+ :param Optional[bool] getstate_setstate:
+ .. note::
+ This is usually only interesting for slotted classes and you should
+ probably just set *auto_detect* to `True`.
+
+ If `True`, ``__getstate__`` and
+ ``__setstate__`` are generated and attached to the class. This is
+ necessary for slotted classes to be pickleable. If left `None`, it's
+ `True` by default for slotted classes and ``False`` for dict classes.
+
+ If *auto_detect* is `True`, and *getstate_setstate* is left `None`,
+ and **either** ``__getstate__`` or ``__setstate__`` is detected directly
+ on the class (i.e. not inherited), it is set to `False` (this is usually
+ what you want).
+
+ :param on_setattr: A callable that is run whenever the user attempts to set
+ an attribute (either by assignment like ``i.x = 42`` or by using
+ `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments
+ as validators: the instance, the attribute that is being modified, and
+ the new value.
+
+ If no exception is raised, the attribute is set to the return value of
+ the callable.
+
+ If a list of callables is passed, they're automatically wrapped in an
+ `attr.setters.pipe`.
+
+ :param Optional[callable] field_transformer:
+ A function that is called with the original class object and all
+ fields right before ``attrs`` finalizes the class. You can use
+ this, e.g., to automatically add converters or validators to
+ fields based on their types. See `transform-fields` for more details.
+
+ .. versionadded:: 16.0.0 *slots*
+ .. versionadded:: 16.1.0 *frozen*
+ .. versionadded:: 16.3.0 *str*
+ .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
+ .. versionchanged:: 17.1.0
+ *hash* supports ``None`` as value which is also the default now.
+ .. versionadded:: 17.3.0 *auto_attribs*
+ .. versionchanged:: 18.1.0
+ If *these* is passed, no attributes are deleted from the class body.
+ .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
+ .. versionadded:: 18.2.0 *weakref_slot*
+ .. deprecated:: 18.2.0
+ ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
+ `DeprecationWarning` if the classes compared are subclasses of
+ each other. ``__eq__`` and ``__ne__`` never tried to compare subclasses
+ to each other.
+ .. versionchanged:: 19.2.0
+ ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
+ subclasses comparable anymore.
+ .. versionadded:: 18.2.0 *kw_only*
+ .. versionadded:: 18.2.0 *cache_hash*
+ .. versionadded:: 19.1.0 *auto_exc*
+ .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+ .. versionadded:: 19.2.0 *eq* and *order*
+ .. versionadded:: 20.1.0 *auto_detect*
+ .. versionadded:: 20.1.0 *collect_by_mro*
+ .. versionadded:: 20.1.0 *getstate_setstate*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionadded:: 20.3.0 *field_transformer*
+ .. versionchanged:: 21.1.0
+ ``init=False`` injects ``__attrs_init__``
+ .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__``
+ .. versionchanged:: 21.1.0 *cmp* undeprecated
+ """
+ if auto_detect and PY2:
+ raise PythonTooOldError(
+ "auto_detect only works on Python 3 and later."
+ )
+
+ eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)
+ hash_ = hash # work around the lack of nonlocal
+
+ if isinstance(on_setattr, (list, tuple)):
+ on_setattr = setters.pipe(*on_setattr)
+
+ def wrap(cls):
+
+ if getattr(cls, "__class__", None) is None:
+ raise TypeError("attrs only works with new-style classes.")
+
+ is_frozen = frozen or _has_frozen_base_class(cls)
+ is_exc = auto_exc is True and issubclass(cls, BaseException)
+ has_own_setattr = auto_detect and _has_own_attribute(
+ cls, "__setattr__"
+ )
+
+ if has_own_setattr and is_frozen:
+ raise ValueError("Can't freeze a class with a custom __setattr__.")
+
+ builder = _ClassBuilder(
+ cls,
+ these,
+ slots,
+ is_frozen,
+ weakref_slot,
+ _determine_whether_to_implement(
+ cls,
+ getstate_setstate,
+ auto_detect,
+ ("__getstate__", "__setstate__"),
+ default=slots,
+ ),
+ auto_attribs,
+ kw_only,
+ cache_hash,
+ is_exc,
+ collect_by_mro,
+ on_setattr,
+ has_own_setattr,
+ field_transformer,
+ )
+ if _determine_whether_to_implement(
+ cls, repr, auto_detect, ("__repr__",)
+ ):
+ builder.add_repr(repr_ns)
+ if str is True:
+ builder.add_str()
+
+ eq = _determine_whether_to_implement(
+ cls, eq_, auto_detect, ("__eq__", "__ne__")
+ )
+ if not is_exc and eq is True:
+ builder.add_eq()
+ if not is_exc and _determine_whether_to_implement(
+ cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__")
+ ):
+ builder.add_order()
+
+ builder.add_setattr()
+
+ if (
+ hash_ is None
+ and auto_detect is True
+ and _has_own_attribute(cls, "__hash__")
+ ):
+ hash = False
+ else:
+ hash = hash_
+ if hash is not True and hash is not False and hash is not None:
+ # Can't use `hash in (True, False, None)` because 1 == True, for example.
+ raise TypeError(
+ "Invalid value for hash. Must be True, False, or None."
+ )
+ elif hash is False or (hash is None and eq is False) or is_exc:
+ # Don't do anything. Should fall back to object's __hash__,
+ # which is by id.
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " hashing must be either explicitly or implicitly "
+ "enabled."
+ )
+ elif hash is True or (
+ hash is None and eq is True and is_frozen is True
+ ):
+ # Build a __hash__ if told so, or if it's safe.
+ builder.add_hash()
+ else:
+ # Raise TypeError on attempts to hash.
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " hashing must be either explicitly or implicitly "
+ "enabled."
+ )
+ builder.make_unhashable()
+
+ if _determine_whether_to_implement(
+ cls, init, auto_detect, ("__init__",)
+ ):
+ builder.add_init()
+ else:
+ builder.add_attrs_init()
+ if cache_hash:
+ raise TypeError(
+ "Invalid value for cache_hash. To use hash caching,"
+ " init must be True."
+ )
+
+ return builder.build_class()
+
+ # maybe_cls's type depends on the usage of the decorator. It's a class
+ # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
+ if maybe_cls is None:
+ return wrap
+ else:
+ return wrap(maybe_cls)
+
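+# A minimal usage sketch for the decorator above (editor's illustration,
+# Python 3, hypothetical class):
+#
+#   @attr.s(slots=True, frozen=True, auto_attribs=True)
+#   class Point:
+#       x: int
+#       y: int = 0
+#
+#   p = Point(1)
+#   assert (p.x, p.y) == (1, 0)
+#   assert hash(p) == hash(Point(1))  # eq + frozen => __hash__ is generated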
+
+_attrs = attrs
+"""
+Internal alias so we can use it in functions that take an argument called
+*attrs*.
+"""
+
+
+if PY2:
+
+ def _has_frozen_base_class(cls):
+ """
+ Check whether *cls* has a frozen ancestor by looking at its
+ __setattr__.
+ """
+ return (
+ getattr(cls.__setattr__, "__module__", None)
+ == _frozen_setattrs.__module__
+ and cls.__setattr__.__name__ == _frozen_setattrs.__name__
+ )
+
+
+else:
+
+ def _has_frozen_base_class(cls):
+ """
+ Check whether *cls* has a frozen ancestor by looking at its
+ __setattr__.
+ """
+ return cls.__setattr__ == _frozen_setattrs
+
+
+def _generate_unique_filename(cls, func_name):
+ """
+ Create a "filename" suitable for a function being generated.
+ """
+ unique_id = uuid.uuid4()
+ extra = ""
+ count = 1
+
+ while True:
+ unique_filename = "<attrs generated {0} {1}.{2}{3}>".format(
+ func_name,
+ cls.__module__,
+ getattr(cls, "__qualname__", cls.__name__),
+ extra,
+ )
+ # To handle concurrency we essentially "reserve" our spot in
+ # the linecache with a dummy line. The caller can then
+ # set this value correctly.
+ cache_line = (1, None, (str(unique_id),), unique_filename)
+ if (
+ linecache.cache.setdefault(unique_filename, cache_line)
+ == cache_line
+ ):
+ return unique_filename
+
+ # Looks like this spot is taken. Try again.
+ count += 1
+ extra = "-{0}".format(count)
+
+
+def _make_hash(cls, attrs, frozen, cache_hash):
+ attrs = tuple(
+ a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
+ )
+
+ tab = " "
+
+ unique_filename = _generate_unique_filename(cls, "hash")
+ type_hash = hash(unique_filename)
+
+ hash_def = "def __hash__(self"
+ hash_func = "hash(("
+ closing_braces = "))"
+ if not cache_hash:
+ hash_def += "):"
+ else:
+ if not PY2:
+ hash_def += ", *"
+
+ hash_def += (
+ ", _cache_wrapper="
+ + "__import__('attr._make')._make._CacheHashWrapper):"
+ )
+ hash_func = "_cache_wrapper(" + hash_func
+ closing_braces += ")"
+
+ method_lines = [hash_def]
+
+ def append_hash_computation_lines(prefix, indent):
+ """
+ Generate the code for actually computing the hash code.
+ Below, this will either be returned directly or used to compute
+ a value which is then cached, depending on the value of *cache_hash*.
+ """
+
+ method_lines.extend(
+ [
+ indent + prefix + hash_func,
+ indent + " %d," % (type_hash,),
+ ]
+ )
+
+ for a in attrs:
+ method_lines.append(indent + " self.%s," % a.name)
+
+ method_lines.append(indent + " " + closing_braces)
+
+ if cache_hash:
+ method_lines.append(tab + "if self.%s is None:" % _hash_cache_field)
+ if frozen:
+ append_hash_computation_lines(
+ "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2
+ )
+ method_lines.append(tab * 2 + ")") # close __setattr__
+ else:
+ append_hash_computation_lines(
+ "self.%s = " % _hash_cache_field, tab * 2
+ )
+ method_lines.append(tab + "return self.%s" % _hash_cache_field)
+ else:
+ append_hash_computation_lines("return ", tab)
+
+ script = "\n".join(method_lines)
+ return _make_method("__hash__", script, unique_filename)
+
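+# Editor's note: for a plain (non-cached) case, the script assembled above
+# looks roughly like the following, with type_hash baked in as a literal:
+#
+#   def __hash__(self):
+#       return hash((
+#           <type_hash>,
+#           self.x,
+#       ))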
+
+def _add_hash(cls, attrs):
+ """
+ Add a hash method to *cls*.
+ """
+ cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False)
+ return cls
+
+
+def _make_ne():
+ """
+ Create __ne__ method.
+ """
+
+ def __ne__(self, other):
+ """
+ Check equality and either forward a NotImplemented or
+ return the result negated.
+ """
+ result = self.__eq__(other)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return not result
+
+ return __ne__
+
+
+def _make_eq(cls, attrs):
+ """
+ Create __eq__ method for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.eq]
+
+ unique_filename = _generate_unique_filename(cls, "eq")
+ lines = [
+ "def __eq__(self, other):",
+ " if other.__class__ is not self.__class__:",
+ " return NotImplemented",
+ ]
+
+ # We can't just do a big self.x == other.x and ... clause due to
+ # irregularities like nan == nan is false but (nan,) == (nan,) is true.
+ globs = {}
+ if attrs:
+ lines.append(" return (")
+ others = [" ) == ("]
+ for a in attrs:
+ if a.eq_key:
+ cmp_name = "_%s_key" % (a.name,)
+ # Add the key function to the global namespace
+ # of the evaluated function.
+ globs[cmp_name] = a.eq_key
+ lines.append(
+ " %s(self.%s),"
+ % (
+ cmp_name,
+ a.name,
+ )
+ )
+ others.append(
+ " %s(other.%s),"
+ % (
+ cmp_name,
+ a.name,
+ )
+ )
+ else:
+ lines.append(" self.%s," % (a.name,))
+ others.append(" other.%s," % (a.name,))
+
+ lines += others + [" )"]
+ else:
+ lines.append(" return True")
+
+ script = "\n".join(lines)
+
+ return _make_method("__eq__", script, unique_filename, globs)
+
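+# Editor's illustration of the eq_key branch above (a callable passed to
+# `eq`; class name hypothetical):
+#
+#   @attr.s
+#   class CaseInsensitive(object):
+#       name = attr.ib(eq=str.lower)
+#
+#   assert CaseInsensitive("Attrs") == CaseInsensitive("ATTRS")
+#   # __eq__ compares _name_key(self.name) == _name_key(other.name)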
+
+def _make_order(cls, attrs):
+ """
+ Create ordering methods for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.order]
+
+ def attrs_to_tuple(obj):
+ """
+ Save us some typing.
+ """
+ return tuple(
+ key(value) if key else value
+ for value, key in (
+ (getattr(obj, a.name), a.order_key) for a in attrs
+ )
+ )
+
+ def __lt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) < attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __le__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) <= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __gt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) > attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __ge__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) >= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ return __lt__, __le__, __gt__, __ge__
+
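+# Editor's illustration: ordering compares per-attribute tuples and only
+# between identical classes (hypothetical class; order is on by default):
+#
+#   @attr.s
+#   class Version(object):
+#       major = attr.ib()
+#       minor = attr.ib()
+#
+#   assert Version(3, 17) < Version(3, 18)   # (3, 17) < (3, 18)
+#   Version(3, 17) < object()                # NotImplemented -> TypeError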
+
+def _add_eq(cls, attrs=None):
+ """
+ Add equality methods to *cls* with *attrs*.
+ """
+ if attrs is None:
+ attrs = cls.__attrs_attrs__
+
+ cls.__eq__ = _make_eq(cls, attrs)
+ cls.__ne__ = _make_ne()
+
+ return cls
+
+
+_already_repring = threading.local()
+
+
+def _make_repr(attrs, ns):
+ """
+ Make a repr method that includes relevant *attrs*, adding *ns* to the full
+ name.
+ """
+
+ # Figure out which attributes to include, and which function to use to
+ # format them. The a.repr value can be either bool or a custom callable.
+ attr_names_with_reprs = tuple(
+ (a.name, repr if a.repr is True else a.repr)
+ for a in attrs
+ if a.repr is not False
+ )
+
+ def __repr__(self):
+ """
+ Automatically created by attrs.
+ """
+ try:
+ working_set = _already_repring.working_set
+ except AttributeError:
+ working_set = set()
+ _already_repring.working_set = working_set
+
+ if id(self) in working_set:
+ return "..."
+ real_cls = self.__class__
+ if ns is None:
+ qualname = getattr(real_cls, "__qualname__", None)
+ if qualname is not None:
+ class_name = qualname.rsplit(">.", 1)[-1]
+ else:
+ class_name = real_cls.__name__
+ else:
+ class_name = ns + "." + real_cls.__name__
+
+ # Since 'self' remains on the stack (i.e. strongly referenced) for the
+ # duration of this call, it's safe to depend on id(...) stability, and
+ # we don't need to track the instance or worry about properties like
+ # weakref- or hash-ability.
+ working_set.add(id(self))
+ try:
+ result = [class_name, "("]
+ first = True
+ for name, attr_repr in attr_names_with_reprs:
+ if first:
+ first = False
+ else:
+ result.append(", ")
+ result.extend(
+ (name, "=", attr_repr(getattr(self, name, NOTHING)))
+ )
+ return "".join(result) + ")"
+ finally:
+ working_set.remove(id(self))
+
+ return __repr__
+
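+# Editor's illustration of the cycle guard above (hypothetical
+# self-referencing instance):
+#
+#   @attr.s
+#   class Node(object):
+#       child = attr.ib(default=None)
+#
+#   n = Node()
+#   n.child = n
+#   assert repr(n) == "Node(child=...)"  # recursion short-circuits to "..."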
+
+def _add_repr(cls, ns=None, attrs=None):
+ """
+ Add a repr method to *cls*.
+ """
+ if attrs is None:
+ attrs = cls.__attrs_attrs__
+
+ cls.__repr__ = _make_repr(attrs, ns)
+ return cls
+
+
+def fields(cls):
+ """
+ Return the tuple of ``attrs`` attributes for a class.
+
+ The tuple also allows accessing the fields by their names (see below for
+ examples).
+
+ :param type cls: Class to introspect.
+
+ :raise TypeError: If *cls* is not a class.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ :rtype: tuple (with name accessors) of `attr.Attribute`
+
+ .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
+ by name.
+ """
+ if not isclass(cls):
+ raise TypeError("Passed object must be a class.")
+ attrs = getattr(cls, "__attrs_attrs__", None)
+ if attrs is None:
+ raise NotAnAttrsClassError(
+ "{cls!r} is not an attrs-decorated class.".format(cls=cls)
+ )
+ return attrs
+
+
+def fields_dict(cls):
+ """
+ Return an ordered dictionary of ``attrs`` attributes for a class, whose
+ keys are the attribute names.
+
+ :param type cls: Class to introspect.
+
+ :raise TypeError: If *cls* is not a class.
+ :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+ class.
+
+ :rtype: an ordered dict where keys are attribute names and values are
+ `attr.Attribute`\\ s. This will be a `dict` if it's
+ naturally ordered like on Python 3.6+ or an
+ :class:`~collections.OrderedDict` otherwise.
+
+ .. versionadded:: 18.1.0
+ """
+ if not isclass(cls):
+ raise TypeError("Passed object must be a class.")
+ attrs = getattr(cls, "__attrs_attrs__", None)
+ if attrs is None:
+ raise NotAnAttrsClassError(
+ "{cls!r} is not an attrs-decorated class.".format(cls=cls)
+ )
+ return ordered_dict(((a.name, a) for a in attrs))
+
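+# Usage sketch for the two introspection helpers above (editor's
+# illustration, hypothetical class):
+#
+#   @attr.s
+#   class C(object):
+#       x = attr.ib()
+#
+#   assert attr.fields(C).x.name == "x"          # tuple with named access
+#   assert list(attr.fields_dict(C)) == ["x"]    # name -> Attribute mapping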
+
+def validate(inst):
+ """
+ Validate all attributes on *inst* that have a validator.
+
+ Lets all exceptions propagate.
+
+ :param inst: Instance of a class with ``attrs`` attributes.
+ """
+ if _config._run_validators is False:
+ return
+
+ for a in fields(inst.__class__):
+ v = a.validator
+ if v is not None:
+ v(inst, a, getattr(inst, a.name))
+
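+# Editor's sketch: re-running validators after a plain (unhooked)
+# assignment; uses attr.validators.instance_of from this package:
+#
+#   @attr.s
+#   class C(object):
+#       x = attr.ib(validator=attr.validators.instance_of(int))
+#
+#   c = C(1)
+#   c.x = "oops"      # plain assignment is unchecked without on_setattr
+#   attr.validate(c)  # re-runs the validator -> raises TypeError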
+
+def _is_slot_cls(cls):
+ return "__slots__" in cls.__dict__
+
+
+def _is_slot_attr(a_name, base_attr_map):
+ """
+ Check if the attribute name comes from a slot class.
+ """
+ return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name])
+
+
+def _make_init(
+ cls,
+ attrs,
+ pre_init,
+ post_init,
+ frozen,
+ slots,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ has_global_on_setattr,
+ attrs_init,
+):
+ if frozen and has_global_on_setattr:
+ raise ValueError("Frozen classes can't use on_setattr.")
+
+ needs_cached_setattr = cache_hash or frozen
+ filtered_attrs = []
+ attr_dict = {}
+ for a in attrs:
+ if not a.init and a.default is NOTHING:
+ continue
+
+ filtered_attrs.append(a)
+ attr_dict[a.name] = a
+
+ if a.on_setattr is not None:
+ if frozen is True:
+ raise ValueError("Frozen classes can't use on_setattr.")
+
+ needs_cached_setattr = True
+ elif (
+ has_global_on_setattr and a.on_setattr is not setters.NO_OP
+ ) or _is_slot_attr(a.name, base_attr_map):
+ needs_cached_setattr = True
+
+ unique_filename = _generate_unique_filename(cls, "init")
+
+ script, globs, annotations = _attrs_to_init_script(
+ filtered_attrs,
+ frozen,
+ slots,
+ pre_init,
+ post_init,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ needs_cached_setattr,
+ has_global_on_setattr,
+ attrs_init,
+ )
+ if cls.__module__ in sys.modules:
+ # This makes typing.get_type_hints(CLS.__init__) resolve string types.
+ globs.update(sys.modules[cls.__module__].__dict__)
+
+ globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})
+
+ if needs_cached_setattr:
+ # Save the lookup overhead in __init__ if we need to circumvent
+ # setattr hooks.
+ globs["_cached_setattr"] = _obj_setattr
+
+ init = _make_method(
+ "__attrs_init__" if attrs_init else "__init__",
+ script,
+ unique_filename,
+ globs,
+ )
+ init.__annotations__ = annotations
+
+ return init
+
+
+def _setattr(attr_name, value_var, has_on_setattr):
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*.
+ """
+ return "_setattr('%s', %s)" % (attr_name, value_var)
+
+
+def _setattr_with_converter(attr_name, value_var, has_on_setattr):
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*, but run
+ its converter first.
+ """
+ return "_setattr('%s', %s(%s))" % (
+ attr_name,
+ _init_converter_pat % (attr_name,),
+ value_var,
+ )
+
+
+def _assign(attr_name, value, has_on_setattr):
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment.
+ Otherwise delegate to _setattr.
+ """
+ if has_on_setattr:
+ return _setattr(attr_name, value, True)
+
+ return "self.%s = %s" % (attr_name, value)
+
+
+def _assign_with_converter(attr_name, value_var, has_on_setattr):
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment after
+ conversion. Otherwise delegate to _setattr_with_converter.
+ """
+ if has_on_setattr:
+ return _setattr_with_converter(attr_name, value_var, True)
+
+ return "self.%s = %s(%s)" % (
+ attr_name,
+ _init_converter_pat % (attr_name,),
+ value_var,
+ )
+
+
+if PY2:
+
+ def _unpack_kw_only_py2(attr_name, default=None):
+ """
+ Unpack *attr_name* from _kw_only dict.
+ """
+ if default is not None:
+ arg_default = ", %s" % default
+ else:
+ arg_default = ""
+ return "%s = _kw_only.pop('%s'%s)" % (
+ attr_name,
+ attr_name,
+ arg_default,
+ )
+
+ def _unpack_kw_only_lines_py2(kw_only_args):
+ """
+ Unpack all *kw_only_args* from _kw_only dict and handle errors.
+
+ Given a list of strings "{attr_name}" and "{attr_name}={default}",
+ generate a list of lines of code that pop attrs from the _kw_only dict
+ and raise a TypeError similar to the builtin one if a required attr is
+ missing or an extra key is passed.
+
+ >>> print("\n".join(_unpack_kw_only_lines_py2(["a", "b=42"])))
+ try:
+ a = _kw_only.pop('a')
+ b = _kw_only.pop('b', 42)
+ except KeyError as _key_error:
+ raise TypeError(
+ ...
+ if _kw_only:
+ raise TypeError(
+ ...
+ """
+ lines = ["try:"]
+ lines.extend(
+ " " + _unpack_kw_only_py2(*arg.split("="))
+ for arg in kw_only_args
+ )
+ lines += """\
+except KeyError as _key_error:
+ raise TypeError(
+ '__init__() missing required keyword-only argument: %s' % _key_error
+ )
+if _kw_only:
+ raise TypeError(
+ '__init__() got an unexpected keyword argument %r'
+ % next(iter(_kw_only))
+ )
+""".split(
+ "\n"
+ )
+ return lines
+
+
+def _attrs_to_init_script(
+ attrs,
+ frozen,
+ slots,
+ pre_init,
+ post_init,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ needs_cached_setattr,
+ has_global_on_setattr,
+ attrs_init,
+):
+ """
+ Return a script of an initializer for *attrs* and a dict of globals.
+
+ The globals are expected by the generated script.
+
+ If *frozen* is True, we cannot set the attributes directly so we use
+ a cached ``object.__setattr__``.
+ """
+ lines = []
+ if pre_init:
+ lines.append("self.__attrs_pre_init__()")
+
+ if needs_cached_setattr:
+ lines.append(
+ # Circumvent the __setattr__ descriptor to save one lookup per
+ # assignment.
+ # Note _setattr will be used again below if cache_hash is True
+ "_setattr = _cached_setattr.__get__(self, self.__class__)"
+ )
+
+ if frozen is True:
+ if slots is True:
+ fmt_setter = _setattr
+ fmt_setter_with_converter = _setattr_with_converter
+ else:
+ # Dict frozen classes assign directly to __dict__.
+ # But only if the attribute doesn't come from an ancestor slot
+ # class.
+ # Note _inst_dict will be used again below if cache_hash is True
+ lines.append("_inst_dict = self.__dict__")
+
+ def fmt_setter(attr_name, value_var, has_on_setattr):
+ if _is_slot_attr(attr_name, base_attr_map):
+ return _setattr(attr_name, value_var, has_on_setattr)
+
+ return "_inst_dict['%s'] = %s" % (attr_name, value_var)
+
+ def fmt_setter_with_converter(
+ attr_name, value_var, has_on_setattr
+ ):
+ if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
+ return _setattr_with_converter(
+ attr_name, value_var, has_on_setattr
+ )
+
+ return "_inst_dict['%s'] = %s(%s)" % (
+ attr_name,
+ _init_converter_pat % (attr_name,),
+ value_var,
+ )
+
+ else:
+ # Not frozen.
+ fmt_setter = _assign
+ fmt_setter_with_converter = _assign_with_converter
+
+ args = []
+ kw_only_args = []
+ attrs_to_validate = []
+
+ # This is a dictionary of names to validator and converter callables.
+ # Injecting this into __init__ globals lets us avoid lookups.
+ names_for_globals = {}
+ annotations = {"return": None}
+
+ for a in attrs:
+ if a.validator:
+ attrs_to_validate.append(a)
+
+ attr_name = a.name
+ has_on_setattr = a.on_setattr is not None or (
+ a.on_setattr is not setters.NO_OP and has_global_on_setattr
+ )
+ arg_name = a.name.lstrip("_")
+
+ has_factory = isinstance(a.default, Factory)
+ if has_factory and a.default.takes_self:
+ maybe_self = "self"
+ else:
+ maybe_self = ""
+
+ if a.init is False:
+ if has_factory:
+ init_factory_name = _init_factory_pat.format(a.name)
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
+ init_factory_name + "(%s)" % (maybe_self,),
+ has_on_setattr,
+ )
+ )
+ conv_name = _init_converter_pat % (a.name,)
+ names_for_globals[conv_name] = a.converter
+ else:
+ lines.append(
+ fmt_setter(
+ attr_name,
+ init_factory_name + "(%s)" % (maybe_self,),
+ has_on_setattr,
+ )
+ )
+ names_for_globals[init_factory_name] = a.default.factory
+ else:
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
+ "attr_dict['%s'].default" % (attr_name,),
+ has_on_setattr,
+ )
+ )
+ conv_name = _init_converter_pat % (a.name,)
+ names_for_globals[conv_name] = a.converter
+ else:
+ lines.append(
+ fmt_setter(
+ attr_name,
+ "attr_dict['%s'].default" % (attr_name,),
+ has_on_setattr,
+ )
+ )
+ elif a.default is not NOTHING and not has_factory:
+ arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name)
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
+
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
+ )
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
+ else:
+ lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+ elif has_factory:
+ arg = "%s=NOTHING" % (arg_name,)
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
+ lines.append("if %s is not NOTHING:" % (arg_name,))
+
+ init_factory_name = _init_factory_pat.format(a.name)
+ if a.converter is not None:
+ lines.append(
+ " "
+ + fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
+ )
+ lines.append("else:")
+ lines.append(
+ " "
+ + fmt_setter_with_converter(
+ attr_name,
+ init_factory_name + "(" + maybe_self + ")",
+ has_on_setattr,
+ )
+ )
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
+ else:
+ lines.append(
+ " " + fmt_setter(attr_name, arg_name, has_on_setattr)
+ )
+ lines.append("else:")
+ lines.append(
+ " "
+ + fmt_setter(
+ attr_name,
+ init_factory_name + "(" + maybe_self + ")",
+ has_on_setattr,
+ )
+ )
+ names_for_globals[init_factory_name] = a.default.factory
+ else:
+ if a.kw_only:
+ kw_only_args.append(arg_name)
+ else:
+ args.append(arg_name)
+
+ if a.converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr
+ )
+ )
+ names_for_globals[
+ _init_converter_pat % (a.name,)
+ ] = a.converter
+ else:
+ lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+ if a.init is True:
+ if a.type is not None and a.converter is None:
+ annotations[arg_name] = a.type
+ elif a.converter is not None and not PY2:
+ # Try to get the type from the converter.
+ sig = None
+ try:
+ sig = inspect.signature(a.converter)
+ except (ValueError, TypeError): # inspect failed
+ pass
+ if sig:
+ sig_params = list(sig.parameters.values())
+ if (
+ sig_params
+ and sig_params[0].annotation
+ is not inspect.Parameter.empty
+ ):
+ annotations[arg_name] = sig_params[0].annotation
+
+ if attrs_to_validate: # we can skip this if there are no validators.
+ names_for_globals["_config"] = _config
+ lines.append("if _config._run_validators is True:")
+ for a in attrs_to_validate:
+ val_name = "__attr_validator_" + a.name
+ attr_name = "__attr_" + a.name
+ lines.append(
+ " %s(self, %s, self.%s)" % (val_name, attr_name, a.name)
+ )
+ names_for_globals[val_name] = a.validator
+ names_for_globals[attr_name] = a
+
+ if post_init:
+ lines.append("self.__attrs_post_init__()")
+
+ # Because this is set only after __attrs_post_init__ is called, a crash
+ # will result if post-init tries to access the hash code. This seemed
+ # preferable to setting this beforehand, in which case alteration to
+ # field values during post-init combined with post-init accessing the
+ # hash code would result in silent bugs.
+ if cache_hash:
+ if frozen:
+ if slots:
+ # if frozen and slots, then _setattr defined above
+ init_hash_cache = "_setattr('%s', %s)"
+ else:
+ # if frozen and not slots, then _inst_dict defined above
+ init_hash_cache = "_inst_dict['%s'] = %s"
+ else:
+ init_hash_cache = "self.%s = %s"
+ lines.append(init_hash_cache % (_hash_cache_field, "None"))
+
+ # For exceptions we rely on BaseException.__init__ for proper
+ # initialization.
+ if is_exc:
+ vals = ",".join("self." + a.name for a in attrs if a.init)
+
+ lines.append("BaseException.__init__(self, %s)" % (vals,))
+
+ args = ", ".join(args)
+ if kw_only_args:
+ if PY2:
+ lines = _unpack_kw_only_lines_py2(kw_only_args) + lines
+
+ args += "%s**_kw_only" % (", " if args else "",) # leading comma
+ else:
+ args += "%s*, %s" % (
+ ", " if args else "", # leading comma
+ ", ".join(kw_only_args), # kw_only args
+ )
+ return (
+ """\
+def {init_name}(self, {args}):
+ {lines}
+""".format(
+ init_name=("__attrs_init__" if attrs_init else "__init__"),
+ args=args,
+ lines="\n ".join(lines) if lines else "pass",
+ ),
+ names_for_globals,
+ annotations,
+ )
+
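+# For orientation: for a plain ``@attr.s`` class with a single attribute
+# ``x = attr.ib()``, the script assembled above compiles down to roughly the
+# following (a simplified sketch; converters, validators, frozen classes and
+# cache_hash each contribute additional lines):
+#
+#     def __init__(self, x):
+#         self.x = x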
+
+class Attribute(object):
+ """
+ *Read-only* representation of an attribute.
+
+ Instances of this class are frequently used for introspection purposes
+ like:
+
+ - `fields` returns a tuple of them.
+ - Validators get them passed as the first argument.
+ - The *field transformer* hook receives a list of them.
+
+ :attribute name: The name of the attribute.
+ :attribute inherited: Whether or not that attribute has been inherited from
+ a base class.
+
+ Plus *all* arguments of `attr.ib` (except for ``factory``,
+ which is only syntactic sugar for ``default=Factory(...)``).
+
+ .. versionadded:: 20.1.0 *inherited*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionchanged:: 20.2.0 *inherited* is not taken into account for
+ equality checks and hashing anymore.
+ .. versionadded:: 21.1.0 *eq_key* and *order_key*
+
+ For the full version history of the fields, see `attr.ib`.
+ """
+
+ __slots__ = (
+ "name",
+ "default",
+ "validator",
+ "repr",
+ "eq",
+ "eq_key",
+ "order",
+ "order_key",
+ "hash",
+ "init",
+ "metadata",
+ "type",
+ "converter",
+ "kw_only",
+ "inherited",
+ "on_setattr",
+ )
+
+ def __init__(
+ self,
+ name,
+ default,
+ validator,
+ repr,
+ cmp, # XXX: unused, remove along with other cmp code.
+ hash,
+ init,
+ inherited,
+ metadata=None,
+ type=None,
+ converter=None,
+ kw_only=False,
+ eq=None,
+ eq_key=None,
+ order=None,
+ order_key=None,
+ on_setattr=None,
+ ):
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
+ cmp, eq_key or eq, order_key or order, True
+ )
+
+ # Cache this descriptor here to speed things up later.
+ bound_setattr = _obj_setattr.__get__(self, Attribute)
+
+ # Despite the big red warning, people *do* instantiate `Attribute`
+ # themselves.
+ bound_setattr("name", name)
+ bound_setattr("default", default)
+ bound_setattr("validator", validator)
+ bound_setattr("repr", repr)
+ bound_setattr("eq", eq)
+ bound_setattr("eq_key", eq_key)
+ bound_setattr("order", order)
+ bound_setattr("order_key", order_key)
+ bound_setattr("hash", hash)
+ bound_setattr("init", init)
+ bound_setattr("converter", converter)
+ bound_setattr(
+ "metadata",
+ (
+ metadata_proxy(metadata)
+ if metadata
+ else _empty_metadata_singleton
+ ),
+ )
+ bound_setattr("type", type)
+ bound_setattr("kw_only", kw_only)
+ bound_setattr("inherited", inherited)
+ bound_setattr("on_setattr", on_setattr)
+
+ def __setattr__(self, name, value):
+ raise FrozenInstanceError()
+
+ @classmethod
+ def from_counting_attr(cls, name, ca, type=None):
+ # type holds the annotated value. deal with conflicts:
+ if type is None:
+ type = ca.type
+ elif ca.type is not None:
+ raise ValueError(
+ "Type annotation and type argument cannot both be present"
+ )
+ inst_dict = {
+ k: getattr(ca, k)
+ for k in Attribute.__slots__
+ if k
+ not in (
+ "name",
+ "validator",
+ "default",
+ "type",
+ "inherited",
+ ) # exclude methods and deprecated alias
+ }
+ return cls(
+ name=name,
+ validator=ca._validator,
+ default=ca._default,
+ type=type,
+ cmp=None,
+ inherited=False,
+ **inst_dict
+ )
+
+ @property
+ def cmp(self):
+ """
+ Simulate the presence of a cmp attribute and warn.
+ """
+ warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2)
+
+ return self.eq and self.order
+
+ # Don't use attr.evolve since fields(Attribute) doesn't work
+ def evolve(self, **changes):
+ """
+ Copy *self* and apply *changes*.
+
+ This works similarly to `attr.evolve` but that function does not work
+ with ``Attribute``.
+
+ It is mainly meant to be used for `transform-fields`.
+
+ .. versionadded:: 20.3.0
+ """
+ new = copy.copy(self)
+
+ new._setattrs(changes.items())
+
+ return new
+
+ # Don't use _add_pickle since fields(Attribute) doesn't work
+ def __getstate__(self):
+ """
+ Play nice with pickle.
+ """
+ return tuple(
+ getattr(self, name) if name != "metadata" else dict(self.metadata)
+ for name in self.__slots__
+ )
+
+ def __setstate__(self, state):
+ """
+ Play nice with pickle.
+ """
+ self._setattrs(zip(self.__slots__, state))
+
+ def _setattrs(self, name_values_pairs):
+ bound_setattr = _obj_setattr.__get__(self, Attribute)
+ for name, value in name_values_pairs:
+ if name != "metadata":
+ bound_setattr(name, value)
+ else:
+ bound_setattr(
+ name,
+ metadata_proxy(value)
+ if value
+ else _empty_metadata_singleton,
+ )
+
+
+_a = [
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=False,
+ hash=(name != "metadata"),
+ init=True,
+ inherited=False,
+ )
+ for name in Attribute.__slots__
+]
+
+Attribute = _add_hash(
+ _add_eq(
+ _add_repr(Attribute, attrs=_a),
+ attrs=[a for a in _a if a.name != "inherited"],
+ ),
+ attrs=[a for a in _a if a.hash and a.name != "inherited"],
+)
+
+
+class _CountingAttr(object):
+ """
+ Intermediate representation of attributes that uses a counter to preserve
+ the order in which the attributes have been defined.
+
+ *Internal* data structure of the attrs library. Running into an instance
+ of it is most likely the result of a bug like a forgotten `@attr.s`
+ decorator.
+ """
+
+ __slots__ = (
+ "counter",
+ "_default",
+ "repr",
+ "eq",
+ "eq_key",
+ "order",
+ "order_key",
+ "hash",
+ "init",
+ "metadata",
+ "_validator",
+ "converter",
+ "type",
+ "kw_only",
+ "on_setattr",
+ )
+ __attrs_attrs__ = tuple(
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=True,
+ init=True,
+ kw_only=False,
+ eq=True,
+ eq_key=None,
+ order=False,
+ order_key=None,
+ inherited=False,
+ on_setattr=None,
+ )
+ for name in (
+ "counter",
+ "_default",
+ "repr",
+ "eq",
+ "order",
+ "hash",
+ "init",
+ "on_setattr",
+ )
+ ) + (
+ Attribute(
+ name="metadata",
+ default=None,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=False,
+ init=True,
+ kw_only=False,
+ eq=True,
+ eq_key=None,
+ order=False,
+ order_key=None,
+ inherited=False,
+ on_setattr=None,
+ ),
+ )
+ cls_counter = 0
+
+ def __init__(
+ self,
+ default,
+ validator,
+ repr,
+ cmp,
+ hash,
+ init,
+ converter,
+ metadata,
+ type,
+ kw_only,
+ eq,
+ eq_key,
+ order,
+ order_key,
+ on_setattr,
+ ):
+ _CountingAttr.cls_counter += 1
+ self.counter = _CountingAttr.cls_counter
+ self._default = default
+ self._validator = validator
+ self.converter = converter
+ self.repr = repr
+ self.eq = eq
+ self.eq_key = eq_key
+ self.order = order
+ self.order_key = order_key
+ self.hash = hash
+ self.init = init
+ self.metadata = metadata
+ self.type = type
+ self.kw_only = kw_only
+ self.on_setattr = on_setattr
+
+ def validator(self, meth):
+ """
+ Decorator that adds *meth* to the list of validators.
+
+ Returns *meth* unchanged.
+
+ .. versionadded:: 17.1.0
+ """
+ if self._validator is None:
+ self._validator = meth
+ else:
+ self._validator = and_(self._validator, meth)
+ return meth
+
+ def default(self, meth):
+ """
+ Decorator that allows setting the default for an attribute.
+
+ Returns *meth* unchanged.
+
+ :raises DefaultAlreadySetError: If default has been set before.
+
+ .. versionadded:: 17.1.0
+ """
+ if self._default is not NOTHING:
+ raise DefaultAlreadySetError()
+
+ self._default = Factory(meth, takes_self=True)
+
+ return meth
+
+
+_CountingAttr = _add_eq(_add_repr(_CountingAttr))
+
+
+class Factory(object):
+ """
+ Stores a factory callable.
+
+ If passed as the default value to `attr.ib`, the factory is used to
+ generate a new value.
+
+ :param callable factory: A callable that takes either none or exactly one
+ mandatory positional argument depending on *takes_self*.
+ :param bool takes_self: Pass the partially initialized instance that is
+ being initialized as a positional argument.
+
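+ For example:
+
+ >>> @attrs
+ ... class C(object):
+ ...     x = attrib(default=Factory(list))
+ >>> C()
+ C(x=[])
+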
+ .. versionadded:: 17.1.0 *takes_self*
+ """
+
+ __slots__ = ("factory", "takes_self")
+
+ def __init__(self, factory, takes_self=False):
+ """
+ `Factory` is part of the default machinery so if we want a default
+ value here, we have to implement it ourselves.
+ """
+ self.factory = factory
+ self.takes_self = takes_self
+
+ def __getstate__(self):
+ """
+ Play nice with pickle.
+ """
+ return tuple(getattr(self, name) for name in self.__slots__)
+
+ def __setstate__(self, state):
+ """
+ Play nice with pickle.
+ """
+ for name, value in zip(self.__slots__, state):
+ setattr(self, name, value)
+
+
+_f = [
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=False,
+ hash=True,
+ init=True,
+ inherited=False,
+ )
+ for name in Factory.__slots__
+]
+
+Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
+
+
+def make_class(name, attrs, bases=(object,), **attributes_arguments):
+ """
+ A quick way to create a new class called *name* with *attrs*.
+
+ :param str name: The name for the new class.
+
+ :param attrs: A list of names or a dictionary of mappings of names to
+ attributes.
+
+ If *attrs* is a list or an ordered dict (`dict` on Python 3.6+,
+ `collections.OrderedDict` otherwise), the order is deduced from
+ the order of the names or attributes inside *attrs*. Otherwise the
+ order of the definition of the attributes is used.
+ :type attrs: `list` or `dict`
+
+ :param tuple bases: Classes that the new class will subclass.
+
+ :param attributes_arguments: Passed unmodified to `attr.s`.
+
+ :return: A new class with *attrs*.
+ :rtype: type
+
+ .. versionadded:: 17.1.0 *bases*
+ .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
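+
+ For example:
+
+ >>> C1 = make_class("C1", ["x", "y"])
+ >>> C1(1, 2)
+ C1(x=1, y=2)
+ >>> C2 = make_class("C2", {"x": attrib(default=42)})
+ >>> C2()
+ C2(x=42)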
+ """
+ if isinstance(attrs, dict):
+ cls_dict = attrs
+ elif isinstance(attrs, (list, tuple)):
+ cls_dict = dict((a, attrib()) for a in attrs)
+ else:
+ raise TypeError("attrs argument must be a dict or a list.")
+
+ pre_init = cls_dict.pop("__attrs_pre_init__", None)
+ post_init = cls_dict.pop("__attrs_post_init__", None)
+ user_init = cls_dict.pop("__init__", None)
+
+ body = {}
+ if pre_init is not None:
+ body["__attrs_pre_init__"] = pre_init
+ if post_init is not None:
+ body["__attrs_post_init__"] = post_init
+ if user_init is not None:
+ body["__init__"] = user_init
+
+ type_ = new_class(name, bases, {}, lambda ns: ns.update(body))
+
+ # For pickling to work, the __module__ variable needs to be set to the
+ # frame where the class is created. Bypass this step in environments where
+ # sys._getframe is not defined (Jython for example) or sys._getframe is not
+ # defined for arguments greater than 0 (IronPython).
+ try:
+ type_.__module__ = sys._getframe(1).f_globals.get(
+ "__name__", "__main__"
+ )
+ except (AttributeError, ValueError):
+ pass
+
+ # We do it here for proper warnings with meaningful stacklevel.
+ cmp = attributes_arguments.pop("cmp", None)
+ (
+ attributes_arguments["eq"],
+ attributes_arguments["order"],
+ ) = _determine_attrs_eq_order(
+ cmp,
+ attributes_arguments.get("eq"),
+ attributes_arguments.get("order"),
+ True,
+ )
+
+ return _attrs(these=cls_dict, **attributes_arguments)(type_)
+
+
+# These are required within this module, so we define them here and merely
+# import them into .validators / .converters.
+
+
+@attrs(slots=True, hash=True)
+class _AndValidator(object):
+ """
+ Compose many validators to a single one.
+ """
+
+ _validators = attrib()
+
+ def __call__(self, inst, attr, value):
+ for v in self._validators:
+ v(inst, attr, value)
+
+
+def and_(*validators):
+ """
+ A validator that composes multiple validators into one.
+
+ When called on a value, it runs all wrapped validators.
+
+ :param callables validators: Arbitrary number of validators.
+
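+ For example, composing two plain validator callables (an illustrative
+ sketch; validators receive ``(instance, attribute, value)``):
+
+ >>> def positive(inst, attr, value):
+ ...     if value <= 0:
+ ...         raise ValueError("must be positive")
+ >>> def even(inst, attr, value):
+ ...     if value % 2:
+ ...         raise ValueError("must be even")
+ >>> and_(positive, even)(None, None, 4)  # both pass, returns None
+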
+ .. versionadded:: 17.1.0
+ """
+ vals = []
+ for validator in validators:
+ vals.extend(
+ validator._validators
+ if isinstance(validator, _AndValidator)
+ else [validator]
+ )
+
+ return _AndValidator(tuple(vals))
+
+
+def pipe(*converters):
+ """
+ A converter that composes multiple converters into one.
+
+ When called on a value, it runs all wrapped converters, returning the
+ *last* value.
+
+ Type annotations will be inferred from the wrapped converters'
+ annotations, if they have any.
+
+ :param callables converters: Arbitrary number of converters.
+
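+ For example:
+
+ >>> c = pipe(str.strip, int)
+ >>> c(" 42 ")
+ 42
+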
+ .. versionadded:: 20.1.0
+ """
+
+ def pipe_converter(val):
+ for converter in converters:
+ val = converter(val)
+
+ return val
+
+ if not PY2:
+ if not converters:
+ # If the converter list is empty, pipe_converter is the identity.
+ A = typing.TypeVar("A")
+ pipe_converter.__annotations__ = {"val": A, "return": A}
+ else:
+ # Get parameter type.
+ sig = None
+ try:
+ sig = inspect.signature(converters[0])
+ except (ValueError, TypeError): # inspect failed
+ pass
+ if sig:
+ params = list(sig.parameters.values())
+ if (
+ params
+ and params[0].annotation is not inspect.Parameter.empty
+ ):
+ pipe_converter.__annotations__["val"] = params[
+ 0
+ ].annotation
+ # Get return type.
+ sig = None
+ try:
+ sig = inspect.signature(converters[-1])
+ except (ValueError, TypeError): # inspect failed
+ pass
+ if sig and sig.return_annotation is not inspect.Signature().empty:
+ pipe_converter.__annotations__[
+ "return"
+ ] = sig.return_annotation
+
+ return pipe_converter
diff --git a/openpype/hosts/fusion/vendor/attr/_next_gen.py b/openpype/hosts/fusion/vendor/attr/_next_gen.py
new file mode 100644
index 0000000000..fab0af966a
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/_next_gen.py
@@ -0,0 +1,158 @@
+"""
+These are Python 3.6+-only and keyword-only APIs that call `attr.s` and
+`attr.ib` with different default values.
+"""
+
+from functools import partial
+
+# Use a relative import so the vendored copy never picks up a system attr.
+from .exceptions import UnannotatedAttributeError
+
+from . import setters
+from ._make import NOTHING, _frozen_setattrs, attrib, attrs
+
+
+def define(
+ maybe_cls=None,
+ *,
+ these=None,
+ repr=None,
+ hash=None,
+ init=None,
+ slots=True,
+ frozen=False,
+ weakref_slot=True,
+ str=False,
+ auto_attribs=None,
+ kw_only=False,
+ cache_hash=False,
+ auto_exc=True,
+ eq=None,
+ order=False,
+ auto_detect=True,
+ getstate_setstate=None,
+ on_setattr=None,
+ field_transformer=None,
+):
+ r"""
+ Like `attr.s`, but with different (more modern) default values. The only
+ behavioral differences are the handling of the *auto_attribs* option:
+
+ :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
+ exactly like `attr.s`. If left `None`, `attr.s` will try to guess:
+
+ 1. If any attributes are annotated and no unannotated `attr.ib`\ s
+ are found, it assumes *auto_attribs=True*.
+ 2. Otherwise it assumes *auto_attribs=False* and tries to collect
+ `attr.ib`\ s.
+
+ and that mutable classes (``frozen=False``) validate on ``__setattr__``.
+
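+ For example (here `define` guesses ``auto_attribs=True`` because the
+ fields are annotated):
+
+ >>> @define
+ ... class Point:
+ ...     x: int
+ ...     y: int = 0
+ >>> Point(1)
+ Point(x=1, y=0)
+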
+ .. versionadded:: 20.1.0
+ """
+
+ def do_it(cls, auto_attribs):
+ return attrs(
+ maybe_cls=cls,
+ these=these,
+ repr=repr,
+ hash=hash,
+ init=init,
+ slots=slots,
+ frozen=frozen,
+ weakref_slot=weakref_slot,
+ str=str,
+ auto_attribs=auto_attribs,
+ kw_only=kw_only,
+ cache_hash=cache_hash,
+ auto_exc=auto_exc,
+ eq=eq,
+ order=order,
+ auto_detect=auto_detect,
+ collect_by_mro=True,
+ getstate_setstate=getstate_setstate,
+ on_setattr=on_setattr,
+ field_transformer=field_transformer,
+ )
+
+ def wrap(cls):
+ """
+ Making this a wrapper ensures this code runs during class creation.
+
+ We also ensure that frozen-ness of classes is inherited.
+ """
+ nonlocal frozen, on_setattr
+
+ had_on_setattr = on_setattr not in (None, setters.NO_OP)
+
+ # By default, mutable classes validate on setattr.
+ if frozen is False and on_setattr is None:
+ on_setattr = setters.validate
+
+ # However, if we subclass a frozen class, we inherit the immutability
+ # and disable on_setattr.
+ for base_cls in cls.__bases__:
+ if base_cls.__setattr__ is _frozen_setattrs:
+ if had_on_setattr:
+ raise ValueError(
+ "Frozen classes can't use on_setattr "
+ "(frozen-ness was inherited)."
+ )
+
+ on_setattr = setters.NO_OP
+ break
+
+ if auto_attribs is not None:
+ return do_it(cls, auto_attribs)
+
+ try:
+ return do_it(cls, True)
+ except UnannotatedAttributeError:
+ return do_it(cls, False)
+
+ # maybe_cls's type depends on the usage of the decorator. It's a class
+ # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
+ if maybe_cls is None:
+ return wrap
+ else:
+ return wrap(maybe_cls)
+
+
+mutable = define
+frozen = partial(define, frozen=True, on_setattr=None)
+
+
+def field(
+ *,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ hash=None,
+ init=True,
+ metadata=None,
+ converter=None,
+ factory=None,
+ kw_only=False,
+ eq=None,
+ order=None,
+ on_setattr=None,
+):
+ """
+ Identical to `attr.ib`, except keyword-only and with some arguments
+ removed.
+
+ .. versionadded:: 20.1.0
+ """
+ return attrib(
+ default=default,
+ validator=validator,
+ repr=repr,
+ hash=hash,
+ init=init,
+ metadata=metadata,
+ converter=converter,
+ factory=factory,
+ kw_only=kw_only,
+ eq=eq,
+ order=order,
+ on_setattr=on_setattr,
+ )
diff --git a/openpype/hosts/fusion/vendor/attr/_version_info.py b/openpype/hosts/fusion/vendor/attr/_version_info.py
new file mode 100644
index 0000000000..014e78a1b4
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/_version_info.py
@@ -0,0 +1,85 @@
+from __future__ import absolute_import, division, print_function
+
+from functools import total_ordering
+
+from ._funcs import astuple
+from ._make import attrib, attrs
+
+
+@total_ordering
+@attrs(eq=False, order=False, slots=True, frozen=True)
+class VersionInfo(object):
+ """
+ A version object that can be compared to tuples of length 1--4:
+
+ >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
+ True
+ >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
+ True
+ >>> vi = attr.VersionInfo(19, 2, 0, "final")
+ >>> vi < (19, 1, 1)
+ False
+ >>> vi < (19,)
+ False
+ >>> vi == (19, 2,)
+ True
+ >>> vi == (19, 2, 1)
+ False
+
+ .. versionadded:: 19.2
+ """
+
+ year = attrib(type=int)
+ minor = attrib(type=int)
+ micro = attrib(type=int)
+ releaselevel = attrib(type=str)
+
+ @classmethod
+ def _from_version_string(cls, s):
+ """
+ Parse *s* and return a `VersionInfo`.
+ """
+ v = s.split(".")
+ if len(v) == 3:
+ v.append("final")
+
+ return cls(
+ year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
+ )
+
+ def _ensure_tuple(self, other):
+ """
+ Ensure *other* is a tuple of a valid length.
+
+ Returns a possibly transformed *other* and ourselves as a tuple of
+ the same length as *other*.
+ """
+
+ if self.__class__ is other.__class__:
+ other = astuple(other)
+
+ if not isinstance(other, tuple):
+ raise NotImplementedError
+
+ if not (1 <= len(other) <= 4):
+ raise NotImplementedError
+
+ return astuple(self)[: len(other)], other
+
+ def __eq__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ return us == them
+
+ def __lt__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
+ # have to do anything special with releaselevel for now.
+ return us < them
diff --git a/openpype/hosts/fusion/vendor/attr/_version_info.pyi b/openpype/hosts/fusion/vendor/attr/_version_info.pyi
new file mode 100644
index 0000000000..45ced08633
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/_version_info.pyi
@@ -0,0 +1,9 @@
+class VersionInfo:
+ @property
+ def year(self) -> int: ...
+ @property
+ def minor(self) -> int: ...
+ @property
+ def micro(self) -> int: ...
+ @property
+ def releaselevel(self) -> str: ...
diff --git a/openpype/hosts/fusion/vendor/attr/converters.py b/openpype/hosts/fusion/vendor/attr/converters.py
new file mode 100644
index 0000000000..2777db6d0a
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/converters.py
@@ -0,0 +1,111 @@
+"""
+Commonly useful converters.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from ._compat import PY2
+from ._make import NOTHING, Factory, pipe
+
+
+if not PY2:
+ import inspect
+ import typing
+
+
+__all__ = [
+ "pipe",
+ "optional",
+ "default_if_none",
+]
+
+
+def optional(converter):
+ """
+ A converter that allows an attribute to be optional. An optional attribute
+ is one which can be set to ``None``.
+
+ Type annotations will be inferred from the wrapped converter's
+ annotations, if it has any.
+
+ :param callable converter: the converter that is used for non-``None``
+ values.
+
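+ For example:
+
+ >>> conv = optional(int)
+ >>> conv("42")
+ 42
+ >>> conv(None) is None
+ True
+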
+ .. versionadded:: 17.1.0
+ """
+
+ def optional_converter(val):
+ if val is None:
+ return None
+ return converter(val)
+
+ if not PY2:
+ sig = None
+ try:
+ sig = inspect.signature(converter)
+ except (ValueError, TypeError): # inspect failed
+ pass
+ if sig:
+ params = list(sig.parameters.values())
+ if params and params[0].annotation is not inspect.Parameter.empty:
+ optional_converter.__annotations__["val"] = typing.Optional[
+ params[0].annotation
+ ]
+ if sig.return_annotation is not inspect.Signature.empty:
+ optional_converter.__annotations__["return"] = typing.Optional[
+ sig.return_annotation
+ ]
+
+ return optional_converter
+
+
+def default_if_none(default=NOTHING, factory=None):
+ """
+ A converter that replaces ``None`` values with *default* or the
+ result of *factory*.
+
+ :param default: Value to be used if ``None`` is passed. Passing an instance
+ of `attr.Factory` is supported, however the ``takes_self`` option
+ is *not*.
+ :param callable factory: A callable that takes no parameters and whose
+ result is used if ``None`` is passed.
+
+ :raises TypeError: If **neither** *default* **nor** *factory* is passed.
+ :raises TypeError: If **both** *default* and *factory* are passed.
+ :raises ValueError: If an instance of `attr.Factory` is passed with
+ ``takes_self=True``.
+
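+ For example:
+
+ >>> conv = default_if_none(default=0)
+ >>> conv(None)
+ 0
+ >>> conv(23)
+ 23
+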
+ .. versionadded:: 18.2.0
+ """
+ if default is NOTHING and factory is None:
+ raise TypeError("Must pass either `default` or `factory`.")
+
+ if default is not NOTHING and factory is not None:
+ raise TypeError(
+ "Must pass either `default` or `factory` but not both."
+ )
+
+ if factory is not None:
+ default = Factory(factory)
+
+ if isinstance(default, Factory):
+ if default.takes_self:
+ raise ValueError(
+ "`takes_self` is not supported by default_if_none."
+ )
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default.factory()
+
+ else:
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default
+
+ return default_if_none_converter
diff --git a/openpype/hosts/fusion/vendor/attr/converters.pyi b/openpype/hosts/fusion/vendor/attr/converters.pyi
new file mode 100644
index 0000000000..84a57590b0
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/converters.pyi
@@ -0,0 +1,13 @@
+from typing import Callable, Optional, TypeVar, overload
+
+from . import _ConverterType
+
+
+_T = TypeVar("_T")
+
+def pipe(*validators: _ConverterType) -> _ConverterType: ...
+def optional(converter: _ConverterType) -> _ConverterType: ...
+@overload
+def default_if_none(default: _T) -> _ConverterType: ...
+@overload
+def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
diff --git a/openpype/hosts/fusion/vendor/attr/exceptions.py b/openpype/hosts/fusion/vendor/attr/exceptions.py
new file mode 100644
index 0000000000..f6f9861bea
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/exceptions.py
@@ -0,0 +1,92 @@
+from __future__ import absolute_import, division, print_function
+
+
+class FrozenError(AttributeError):
+ """
+ An attempt has been made to modify a frozen/immutable instance or
+ attribute.
+
+ It mirrors the behavior of ``namedtuples`` by using the same error message
+ and subclassing `AttributeError`.
+
+ .. versionadded:: 20.1.0
+ """
+
+ msg = "can't set attribute"
+ args = [msg]
+
+
+class FrozenInstanceError(FrozenError):
+ """
+ An attempt has been made to modify a frozen instance.
+
+ .. versionadded:: 16.1.0
+ """
+
+
+class FrozenAttributeError(FrozenError):
+ """
+ An attempt has been made to modify a frozen attribute.
+
+ .. versionadded:: 20.1.0
+ """
+
+
+class AttrsAttributeNotFoundError(ValueError):
+ """
+ An ``attrs`` function couldn't find an attribute that the user asked for.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class NotAnAttrsClassError(ValueError):
+ """
+ A non-``attrs`` class has been passed into an ``attrs`` function.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class DefaultAlreadySetError(RuntimeError):
+ """
+ A default has been set using ``attr.ib()`` and an attempt has been made
+ to reset it using the decorator.
+
+ .. versionadded:: 17.1.0
+ """
+
+
+class UnannotatedAttributeError(RuntimeError):
+ """
+ A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
+ annotation.
+
+ .. versionadded:: 17.3.0
+ """
+
+
+class PythonTooOldError(RuntimeError):
+ """
+ An ``attrs`` feature that requires a newer Python version has been used.
+
+ .. versionadded:: 18.2.0
+ """
+
+
+class NotCallableError(TypeError):
+ """
+ An ``attr.ib()`` requiring a callable has been set with a value
+ that is not callable.
+
+ .. versionadded:: 19.2.0
+ """
+
+ def __init__(self, msg, value):
+ super(TypeError, self).__init__(msg, value)
+ self.msg = msg
+ self.value = value
+
+ def __str__(self):
+ return str(self.msg)
diff --git a/openpype/hosts/fusion/vendor/attr/exceptions.pyi b/openpype/hosts/fusion/vendor/attr/exceptions.pyi
new file mode 100644
index 0000000000..a800fb26bb
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/exceptions.pyi
@@ -0,0 +1,18 @@
+from typing import Any
+
+
+class FrozenError(AttributeError):
+ msg: str = ...
+
+class FrozenInstanceError(FrozenError): ...
+class FrozenAttributeError(FrozenError): ...
+class AttrsAttributeNotFoundError(ValueError): ...
+class NotAnAttrsClassError(ValueError): ...
+class DefaultAlreadySetError(RuntimeError): ...
+class UnannotatedAttributeError(RuntimeError): ...
+class PythonTooOldError(RuntimeError): ...
+
+class NotCallableError(TypeError):
+ msg: str = ...
+ value: Any = ...
+ def __init__(self, msg: str, value: Any) -> None: ...
diff --git a/openpype/hosts/fusion/vendor/attr/filters.py b/openpype/hosts/fusion/vendor/attr/filters.py
new file mode 100644
index 0000000000..dc47e8fa38
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/filters.py
@@ -0,0 +1,52 @@
+"""
+Commonly useful filters for `attr.asdict`.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from ._compat import isclass
+from ._make import Attribute
+
+
+def _split_what(what):
+ """
+ Returns a tuple of `frozenset`s of classes and attributes.
+ """
+ return (
+ frozenset(cls for cls in what if isclass(cls)),
+ frozenset(cls for cls in what if isinstance(cls, Attribute)),
+ )
+
+
+def include(*what):
+ """
+ Whitelist *what*.
+
+ :param what: What to whitelist.
+ :type what: `list` of `type` or `attr.Attribute`\\ s
+
+ :rtype: `callable`
+ """
+ cls, attrs = _split_what(what)
+
+ def include_(attribute, value):
+ return value.__class__ in cls or attribute in attrs
+
+ return include_
+
+
+def exclude(*what):
+ """
+ Blacklist *what*.
+
+ :param what: What to blacklist.
+ :type what: `list` of classes or `attr.Attribute`\\ s.
+
+ :rtype: `callable`
+ """
+ cls, attrs = _split_what(what)
+
+ def exclude_(attribute, value):
+ return value.__class__ not in cls and attribute not in attrs
+
+ return exclude_
diff --git a/openpype/hosts/fusion/vendor/attr/filters.pyi b/openpype/hosts/fusion/vendor/attr/filters.pyi
new file mode 100644
index 0000000000..f7b63f1bb4
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/filters.pyi
@@ -0,0 +1,7 @@
+from typing import Any, Union
+
+from . import Attribute, _FilterType
+
+
+def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
+def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
diff --git a/openpype/modules/ftrack/python2_vendor/arrow/tests/__init__.py b/openpype/hosts/fusion/vendor/attr/py.typed
similarity index 100%
rename from openpype/modules/ftrack/python2_vendor/arrow/tests/__init__.py
rename to openpype/hosts/fusion/vendor/attr/py.typed
diff --git a/openpype/hosts/fusion/vendor/attr/setters.py b/openpype/hosts/fusion/vendor/attr/setters.py
new file mode 100644
index 0000000000..240014b3c1
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/setters.py
@@ -0,0 +1,77 @@
+"""
+Commonly used hooks for on_setattr.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+from . import _config
+from .exceptions import FrozenAttributeError
+
+
+def pipe(*setters):
+ """
+ Run all *setters* and return the return value of the last one.
+
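+ A minimal usage sketch (illustrative only; assumes the usual top-level
+ ``attr`` API)::
+
+ @attr.s(on_setattr=pipe(convert, validate))
+ class C(object):
+     x = attr.ib(converter=int)
+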
+ .. versionadded:: 20.1.0
+ """
+
+ def wrapped_pipe(instance, attrib, new_value):
+ rv = new_value
+
+ for setter in setters:
+ rv = setter(instance, attrib, rv)
+
+ return rv
+
+ return wrapped_pipe
+
+
+def frozen(_, __, ___):
+ """
+ Prevent an attribute from being modified.
+
+ .. versionadded:: 20.1.0
+ """
+ raise FrozenAttributeError()
+
+
+def validate(instance, attrib, new_value):
+ """
+ Run *attrib*'s validator on *new_value* if it has one.
+
+ .. versionadded:: 20.1.0
+ """
+ if _config._run_validators is False:
+ return new_value
+
+ v = attrib.validator
+ if not v:
+ return new_value
+
+ v(instance, attrib, new_value)
+
+ return new_value
+
+
+def convert(instance, attrib, new_value):
+ """
+ Run *attrib*'s converter -- if it has one -- on *new_value* and return the
+ result.
+
+ .. versionadded:: 20.1.0
+ """
+ c = attrib.converter
+ if c:
+ return c(new_value)
+
+ return new_value
+
+
+NO_OP = object()
+"""
+Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
+
+Does not work in `pipe` or within lists.
+
+.. versionadded:: 20.1.0
+"""
diff --git a/openpype/hosts/fusion/vendor/attr/setters.pyi b/openpype/hosts/fusion/vendor/attr/setters.pyi
new file mode 100644
index 0000000000..a921e07deb
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/setters.pyi
@@ -0,0 +1,20 @@
+from typing import Any, NewType, NoReturn, TypeVar, cast
+
+from . import Attribute, _OnSetAttrType
+
+
+_T = TypeVar("_T")
+
+def frozen(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> NoReturn: ...
+def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
+def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
+
+# convert is allowed to return Any, because they can be chained using pipe.
+def convert(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> Any: ...
+
+_NoOpType = NewType("_NoOpType", object)
+NO_OP: _NoOpType
diff --git a/openpype/hosts/fusion/vendor/attr/validators.py b/openpype/hosts/fusion/vendor/attr/validators.py
new file mode 100644
index 0000000000..b9a73054e9
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/validators.py
@@ -0,0 +1,379 @@
+"""
+Commonly useful validators.
+"""
+
+from __future__ import absolute_import, division, print_function
+
+import re
+
+from ._make import _AndValidator, and_, attrib, attrs
+from .exceptions import NotCallableError
+
+
+__all__ = [
+ "and_",
+ "deep_iterable",
+ "deep_mapping",
+ "in_",
+ "instance_of",
+ "is_callable",
+ "matches_re",
+ "optional",
+ "provides",
+]
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InstanceOfValidator(object):
+ type = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not isinstance(value, self.type):
+ raise TypeError(
+ "'{name}' must be {type!r} (got {value!r} that is a "
+ "{actual!r}).".format(
+ name=attr.name,
+ type=self.type,
+ actual=value.__class__,
+ value=value,
+ ),
+ attr,
+ self.type,
+ value,
+ )
+
+ def __repr__(self):
+ return "<instance_of validator for type {type!r}>".format(
+ type=self.type
+ )
+
+
+def instance_of(type):
+ """
+ A validator that raises a `TypeError` if the initializer is called
+ with a wrong type for this particular attribute (checks are performed using
+ `isinstance`, therefore it's also valid to pass a tuple of types).
+
+ :param type: The type to check for.
+ :type type: type or tuple of types
+
+ :raises TypeError: With a human readable error message, the attribute
+ (of type `attr.Attribute`), the expected type, and the value it
+ got.
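+
+ For example (using the ``attrs``/``attrib`` helpers imported above):
+
+ >>> @attrs
+ ... class C(object):
+ ...     x = attrib(validator=instance_of(int))
+ >>> C(42)
+ C(x=42)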
+ """
+ return _InstanceOfValidator(type)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MatchesReValidator(object):
+ regex = attrib()
+ flags = attrib()
+ match_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.match_func(value):
+ raise ValueError(
+ "'{name}' must match regex {regex!r}"
+ " ({value!r} doesn't)".format(
+ name=attr.name, regex=self.regex.pattern, value=value
+ ),
+ attr,
+ self.regex,
+ value,
+ )
+
+ def __repr__(self):
+ return "<matches_re validator for pattern {regex!r}>".format(
+ regex=self.regex
+ )
+
+
+def matches_re(regex, flags=0, func=None):
+ r"""
+ A validator that raises `ValueError` if the initializer is called
+ with a string that doesn't match *regex*.
+
+ :param str regex: a regex string to match against
+ :param int flags: flags that will be passed to the underlying re function
+ (default 0)
+ :param callable func: which underlying `re` function to call (options
+ are `re.fullmatch`, `re.search`, `re.match`, default
+ is ``None`` which means either `re.fullmatch` or an emulation of
+ it on Python 2). For performance reasons, they won't be used directly
+ but on a pre-`re.compile`\ ed pattern.
+
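+ For example:
+
+ >>> @attrs
+ ... class User(object):
+ ...     code = attrib(validator=matches_re(r"[0-9]{4}"))
+ >>> User("1234")
+ User(code='1234')
+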
+ .. versionadded:: 19.2.0
+ """
+ fullmatch = getattr(re, "fullmatch", None)
+ valid_funcs = (fullmatch, None, re.search, re.match)
+ if func not in valid_funcs:
+ raise ValueError(
+ "'func' must be one of %s."
+ % (
+ ", ".join(
+ sorted(
+ e and e.__name__ or "None" for e in set(valid_funcs)
+ )
+ ),
+ )
+ )
+
+ pattern = re.compile(regex, flags)
+ if func is re.match:
+ match_func = pattern.match
+ elif func is re.search:
+ match_func = pattern.search
+ else:
+ if fullmatch:
+ match_func = pattern.fullmatch
+ else:
+ pattern = re.compile(r"(?:{})\Z".format(regex), flags)
+ match_func = pattern.match
+
+ return _MatchesReValidator(pattern, flags, match_func)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _ProvidesValidator(object):
+ interface = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.interface.providedBy(value):
+ raise TypeError(
+ "'{name}' must provide {interface!r} which {value!r} "
+ "doesn't.".format(
+ name=attr.name, interface=self.interface, value=value
+ ),
+ attr,
+ self.interface,
+ value,
+ )
+
+ def __repr__(self):
+ return "<provides validator for interface {interface!r}>".format(
+ interface=self.interface
+ )
+
+
+def provides(interface):
+ """
+ A validator that raises a `TypeError` if the initializer is called
+ with an object that does not provide the requested *interface* (checks are
+ performed using ``interface.providedBy(value)``; see `zope.interface
+ <https://zopeinterface.readthedocs.io/en/latest/>`_).
+
+ :param interface: The interface to check for.
+ :type interface: ``zope.interface.Interface``
+
+ :raises TypeError: With a human readable error message, the attribute
+ (of type `attr.Attribute`), the expected interface, and the
+ value it got.
+ """
+ return _ProvidesValidator(interface)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _OptionalValidator(object):
+ validator = attrib()
+
+ def __call__(self, inst, attr, value):
+ if value is None:
+ return
+
+ self.validator(inst, attr, value)
+
+ def __repr__(self):
+ return "<optional validator for {what} or None>".format(
+ what=repr(self.validator)
+ )
+
+
+def optional(validator):
+ """
+ A validator that makes an attribute optional. An optional attribute is one
+ which can be set to ``None`` in addition to satisfying the requirements of
+ the sub-validator.
+
+ :param validator: A validator (or a list of validators) that is used for
+ non-``None`` values.
+ :type validator: callable or `list` of callables.
+
+ .. versionadded:: 15.1.0
+ .. versionchanged:: 17.1.0 *validator* can be a list of validators.
+ """
+ if isinstance(validator, list):
+ return _OptionalValidator(_AndValidator(validator))
+ return _OptionalValidator(validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InValidator(object):
+ options = attrib()
+
+ def __call__(self, inst, attr, value):
+ try:
+ in_options = value in self.options
+ except TypeError: # e.g. `1 in "abc"`
+ in_options = False
+
+ if not in_options:
+ raise ValueError(
+ "'{name}' must be in {options!r} (got {value!r})".format(
+ name=attr.name, options=self.options, value=value
+ )
+ )
+
+ def __repr__(self):
+ return "<in_ validator with options {options!r}>".format(
+ options=self.options
+ )
+
+
+def in_(options):
+ """
+ A validator that raises a `ValueError` if the initializer is called
+ with a value that does not belong in the options provided. The check is
+ performed using ``value in options``.
+
+ :param options: Allowed options.
+ :type options: list, tuple, `enum.Enum`, ...
+
+ :raises ValueError: With a human readable error message, the attribute (of
+ type `attr.Attribute`), the expected options, and the value it
+ got.
+
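+ For example:
+
+ >>> @attrs
+ ... class C(object):
+ ...     state = attrib(validator=in_(["on", "off"]))
+ >>> C("on")
+ C(state='on')
+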
+ .. versionadded:: 17.1.0
+ """
+ return _InValidator(options)
+
+
+@attrs(repr=False, slots=False, hash=True)
+class _IsCallableValidator(object):
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not callable(value):
+ message = (
+ "'{name}' must be callable "
+ "(got {value!r} that is a {actual!r})."
+ )
+ raise NotCallableError(
+ msg=message.format(
+ name=attr.name, value=value, actual=value.__class__
+ ),
+ value=value,
+ )
+
+ def __repr__(self):
+ return "<is_callable validator>"
+
+
+def is_callable():
+ """
+ A validator that raises a `attr.exceptions.NotCallableError` if the
+ initializer is called with a value for this particular attribute
+ that is not callable.
+
+ .. versionadded:: 19.1.0
+
+ :raises `attr.exceptions.NotCallableError`: With a human readable error
+ message containing the attribute (`attr.Attribute`) name,
+ and the value it got.
+ """
+ return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepIterable(object):
+ member_validator = attrib(validator=is_callable())
+ iterable_validator = attrib(
+ default=None, validator=optional(is_callable())
+ )
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.iterable_validator is not None:
+ self.iterable_validator(inst, attr, value)
+
+ for member in value:
+ self.member_validator(inst, attr, member)
+
+ def __repr__(self):
+ iterable_identifier = (
+ ""
+ if self.iterable_validator is None
+ else " {iterable!r}".format(iterable=self.iterable_validator)
+ )
+ return (
+ "<deep_iterable validator for{iterable_identifier}"
+ " iterables of {member!r}>"
+ ).format(
+ iterable_identifier=iterable_identifier,
+ member=self.member_validator,
+ )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+ """
+ A validator that performs deep validation of an iterable.
+
+ :param member_validator: Validator to apply to iterable members
+ :param iterable_validator: Validator to apply to iterable itself
+ (optional)
+
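+ For example:
+
+ >>> @attrs
+ ... class C(object):
+ ...     xs = attrib(validator=deep_iterable(
+ ...         member_validator=instance_of(int),
+ ...         iterable_validator=instance_of(list),
+ ...     ))
+ >>> C([1, 2, 3])
+ C(xs=[1, 2, 3])
+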
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: if any sub-validators fail
+ """
+ return _DeepIterable(member_validator, iterable_validator)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepMapping(object):
+ key_validator = attrib(validator=is_callable())
+ value_validator = attrib(validator=is_callable())
+ mapping_validator = attrib(default=None, validator=optional(is_callable()))
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.mapping_validator is not None:
+ self.mapping_validator(inst, attr, value)
+
+ for key in value:
+ self.key_validator(inst, attr, key)
+ self.value_validator(inst, attr, value[key])
+
+ def __repr__(self):
+ return (
+ "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
+ ).format(key=self.key_validator, value=self.value_validator)
+
+
+def deep_mapping(key_validator, value_validator, mapping_validator=None):
+ """
+ A validator that performs deep validation of a dictionary.
+
+ :param key_validator: Validator to apply to dictionary keys
+ :param value_validator: Validator to apply to dictionary values
+ :param mapping_validator: Validator to apply to top-level mapping
+ attribute (optional)
+
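+ For example:
+
+ >>> @attrs
+ ... class C(object):
+ ...     m = attrib(validator=deep_mapping(
+ ...         key_validator=instance_of(str),
+ ...         value_validator=instance_of(int),
+ ...     ))
+ >>> C({"a": 1})
+ C(m={'a': 1})
+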
+ .. versionadded:: 19.1.0
+
+ :raises TypeError: if any sub-validators fail
+ """
+ return _DeepMapping(key_validator, value_validator, mapping_validator)
diff --git a/openpype/hosts/fusion/vendor/attr/validators.pyi b/openpype/hosts/fusion/vendor/attr/validators.pyi
new file mode 100644
index 0000000000..fe92aac421
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/attr/validators.pyi
@@ -0,0 +1,68 @@
+from typing import (
+ Any,
+ AnyStr,
+ Callable,
+ Container,
+ Iterable,
+ List,
+ Mapping,
+ Match,
+ Optional,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ overload,
+)
+
+from . import _ValidatorType
+
+
+_T = TypeVar("_T")
+_T1 = TypeVar("_T1")
+_T2 = TypeVar("_T2")
+_T3 = TypeVar("_T3")
+_I = TypeVar("_I", bound=Iterable)
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+_M = TypeVar("_M", bound=Mapping)
+
+# To be more precise on instance_of use some overloads.
+# If there are more than 3 items in the tuple then we fall back to Any
+@overload
+def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(
+ type: Tuple[Type[_T1], Type[_T2]]
+) -> _ValidatorType[Union[_T1, _T2]]: ...
+@overload
+def instance_of(
+ type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
+) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
+@overload
+def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
+def provides(interface: Any) -> _ValidatorType[Any]: ...
+def optional(
+ validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
+) -> _ValidatorType[Optional[_T]]: ...
+def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
+def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
+def matches_re(
+ regex: AnyStr,
+ flags: int = ...,
+ func: Optional[
+ Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
+ ] = ...,
+) -> _ValidatorType[AnyStr]: ...
+def deep_iterable(
+ member_validator: _ValidatorType[_T],
+ iterable_validator: Optional[_ValidatorType[_I]] = ...,
+) -> _ValidatorType[_I]: ...
+def deep_mapping(
+ key_validator: _ValidatorType[_K],
+ value_validator: _ValidatorType[_V],
+ mapping_validator: Optional[_ValidatorType[_M]] = ...,
+) -> _ValidatorType[_M]: ...
+def is_callable() -> _ValidatorType[_T]: ...
diff --git a/openpype/hosts/fusion/vendor/urllib3/__init__.py b/openpype/hosts/fusion/vendor/urllib3/__init__.py
new file mode 100644
index 0000000000..fe86b59d78
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/__init__.py
@@ -0,0 +1,85 @@
+"""
+Python HTTP library with thread-safe connection pooling, file post support, a user-friendly API, and more
+"""
+from __future__ import absolute_import
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+import warnings
+from logging import NullHandler
+
+from . import exceptions
+from ._version import __version__
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
+from .filepost import encode_multipart_formdata
+from .poolmanager import PoolManager, ProxyManager, proxy_from_url
+from .response import HTTPResponse
+from .util.request import make_headers
+from .util.retry import Retry
+from .util.timeout import Timeout
+from .util.url import get_host
+
+__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
+__license__ = "MIT"
+__version__ = __version__
+
+__all__ = (
+ "HTTPConnectionPool",
+ "HTTPSConnectionPool",
+ "PoolManager",
+ "ProxyManager",
+ "HTTPResponse",
+ "Retry",
+ "Timeout",
+ "add_stderr_logger",
+ "connection_from_url",
+ "disable_warnings",
+ "encode_multipart_formdata",
+ "get_host",
+ "make_headers",
+ "proxy_from_url",
+)
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+
+def add_stderr_logger(level=logging.DEBUG):
+ """
+ Helper for quickly adding a StreamHandler to the logger. Useful for
+ debugging.
+
+ Returns the handler after adding it.
+ """
+ # This method needs to be in this __init__.py to get the __name__ correct
+ # even if urllib3 is vendored within another package.
+ logger = logging.getLogger(__name__)
+ handler = logging.StreamHandler()
+ handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
+ logger.addHandler(handler)
+ logger.setLevel(level)
+ logger.debug("Added a stderr logging handler to logger: %s", __name__)
+ return handler
+
+
+# ... Clean up.
+del NullHandler
+
+
+# All warning filters *must* be appended unless you're really certain that they
+# shouldn't be: otherwise, it's very hard for users to use most Python
+# mechanisms to silence them.
+# SecurityWarnings always go off by default.
+warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
+# SubjectAltNameWarnings should go off once per host.
+warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True)
+# InsecurePlatformWarnings don't vary between requests, so we keep them at default.
+warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
+# SNIMissingWarnings should go off only once.
+warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True)
+
+
+def disable_warnings(category=exceptions.HTTPWarning):
+ """
+ Helper for quickly disabling all urllib3 warnings.
+ """
+ warnings.simplefilter("ignore", category)
diff --git a/openpype/hosts/fusion/vendor/urllib3/_collections.py b/openpype/hosts/fusion/vendor/urllib3/_collections.py
new file mode 100644
index 0000000000..da9857e986
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/_collections.py
@@ -0,0 +1,337 @@
+from __future__ import absolute_import
+
+try:
+ from collections.abc import Mapping, MutableMapping
+except ImportError:
+ from collections import Mapping, MutableMapping
+try:
+ from threading import RLock
+except ImportError: # Platform-specific: No threads available
+
+ class RLock:
+ def __enter__(self):
+ pass
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ pass
+
+
+from collections import OrderedDict
+
+from .exceptions import InvalidHeader
+from .packages import six
+from .packages.six import iterkeys, itervalues
+
+__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
+
+
+_Null = object()
+
+
+class RecentlyUsedContainer(MutableMapping):
+ """
+ Provides a thread-safe dict-like container which maintains up to
+ ``maxsize`` keys while throwing away the least-recently-used keys beyond
+ ``maxsize``.
+
+ :param maxsize:
+ Maximum number of recent elements to retain.
+
+ :param dispose_func:
+ Every time an item is evicted from the container,
+ ``dispose_func(value)`` is called.
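+
+ For example (an illustrative sketch):
+
+ >>> c = RecentlyUsedContainer(maxsize=2)
+ >>> c['a'] = 1
+ >>> c['b'] = 2
+ >>> c['c'] = 3  # 'a' is the least recently used key and gets evicted
+ >>> 'a' in c, 'c' in c
+ (False, True)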
+ """
+
+ ContainerCls = OrderedDict
+
+ def __init__(self, maxsize=10, dispose_func=None):
+ self._maxsize = maxsize
+ self.dispose_func = dispose_func
+
+ self._container = self.ContainerCls()
+ self.lock = RLock()
+
+ def __getitem__(self, key):
+ # Re-insert the item, moving it to the end of the eviction line.
+ with self.lock:
+ item = self._container.pop(key)
+ self._container[key] = item
+ return item
+
+ def __setitem__(self, key, value):
+ evicted_value = _Null
+ with self.lock:
+ # Possibly evict the existing value of 'key'
+ evicted_value = self._container.get(key, _Null)
+ self._container[key] = value
+
+ # If we didn't evict an existing value, we might have to evict the
+ # least recently used item from the beginning of the container.
+ if len(self._container) > self._maxsize:
+ _key, evicted_value = self._container.popitem(last=False)
+
+ if self.dispose_func and evicted_value is not _Null:
+ self.dispose_func(evicted_value)
+
+ def __delitem__(self, key):
+ with self.lock:
+ value = self._container.pop(key)
+
+ if self.dispose_func:
+ self.dispose_func(value)
+
+ def __len__(self):
+ with self.lock:
+ return len(self._container)
+
+ def __iter__(self):
+ raise NotImplementedError(
+ "Iteration over this class is unlikely to be threadsafe."
+ )
+
+ def clear(self):
+ with self.lock:
+ # Copy pointers to all values, then wipe the mapping
+ values = list(itervalues(self._container))
+ self._container.clear()
+
+ if self.dispose_func:
+ for value in values:
+ self.dispose_func(value)
+
+ def keys(self):
+ with self.lock:
+ return list(iterkeys(self._container))
+
+
+class HTTPHeaderDict(MutableMapping):
+ """
+ :param headers:
+ An iterable of field-value pairs. Must not contain multiple field names
+ when compared case-insensitively.
+
+ :param kwargs:
+ Additional field-value pairs to pass in to ``dict.update``.
+
+ A ``dict`` like container for storing HTTP Headers.
+
+ Field names are stored and compared case-insensitively in compliance with
+ RFC 7230. Iteration provides the first case-sensitive key seen for each
+ case-insensitive pair.
+
+ Using ``__setitem__`` syntax overwrites fields that compare equal
+ case-insensitively in order to maintain ``dict``'s api. For fields that
+ compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
+ in a loop.
+
+ If multiple fields that are equal case-insensitively are passed to the
+ constructor or ``.update``, the behavior is undefined and some will be
+ lost.
+
+ >>> headers = HTTPHeaderDict()
+ >>> headers.add('Set-Cookie', 'foo=bar')
+ >>> headers.add('set-cookie', 'baz=quxx')
+ >>> headers['content-length'] = '7'
+ >>> headers['SET-cookie']
+ 'foo=bar, baz=quxx'
+ >>> headers['Content-Length']
+ '7'
+ """
+
+ def __init__(self, headers=None, **kwargs):
+ super(HTTPHeaderDict, self).__init__()
+ self._container = OrderedDict()
+ if headers is not None:
+ if isinstance(headers, HTTPHeaderDict):
+ self._copy_from(headers)
+ else:
+ self.extend(headers)
+ if kwargs:
+ self.extend(kwargs)
+
+ def __setitem__(self, key, val):
+ self._container[key.lower()] = [key, val]
+ return self._container[key.lower()]
+
+ def __getitem__(self, key):
+ val = self._container[key.lower()]
+ return ", ".join(val[1:])
+
+ def __delitem__(self, key):
+ del self._container[key.lower()]
+
+ def __contains__(self, key):
+ return key.lower() in self._container
+
+ def __eq__(self, other):
+ if not isinstance(other, Mapping) and not hasattr(other, "keys"):
+ return False
+ if not isinstance(other, type(self)):
+ other = type(self)(other)
+ return dict((k.lower(), v) for k, v in self.itermerged()) == dict(
+ (k.lower(), v) for k, v in other.itermerged()
+ )
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ if six.PY2: # Python 2
+ iterkeys = MutableMapping.iterkeys
+ itervalues = MutableMapping.itervalues
+
+ __marker = object()
+
+ def __len__(self):
+ return len(self._container)
+
+ def __iter__(self):
+ # Only provide the originally cased names
+ for vals in self._container.values():
+ yield vals[0]
+
+ def pop(self, key, default=__marker):
+ """D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+ If key is not found, d is returned if given, otherwise KeyError is raised.
+ """
+ # Using the MutableMapping function directly fails due to the private marker.
+ # Using ordinary dict.pop would expose the internal structures.
+ # So let's reinvent the wheel.
+ try:
+ value = self[key]
+ except KeyError:
+ if default is self.__marker:
+ raise
+ return default
+ else:
+ del self[key]
+ return value
+
+ def discard(self, key):
+ try:
+ del self[key]
+ except KeyError:
+ pass
+
+ def add(self, key, val):
+ """Adds a (name, value) pair, doesn't overwrite the value if it already
+ exists.
+
+ >>> headers = HTTPHeaderDict(foo='bar')
+ >>> headers.add('Foo', 'baz')
+ >>> headers['foo']
+ 'bar, baz'
+ """
+ key_lower = key.lower()
+ new_vals = [key, val]
+ # Keep the common case aka no item present as fast as possible
+ vals = self._container.setdefault(key_lower, new_vals)
+ if new_vals is not vals:
+ vals.append(val)
+
+ def extend(self, *args, **kwargs):
+ """Generic import function for any type of header-like object.
+ Adapted version of MutableMapping.update in order to insert items
+ with self.add instead of self.__setitem__
+ """
+ if len(args) > 1:
+ raise TypeError(
+ "extend() takes at most 1 positional "
+ "argument ({0} given)".format(len(args))
+ )
+ other = args[0] if len(args) >= 1 else ()
+
+ if isinstance(other, HTTPHeaderDict):
+ for key, val in other.iteritems():
+ self.add(key, val)
+ elif isinstance(other, Mapping):
+ for key in other:
+ self.add(key, other[key])
+ elif hasattr(other, "keys"):
+ for key in other.keys():
+ self.add(key, other[key])
+ else:
+ for key, value in other:
+ self.add(key, value)
+
+ for key, value in kwargs.items():
+ self.add(key, value)
+
+ def getlist(self, key, default=__marker):
+ """Returns a list of all the values for the named field. Returns an
+ empty list if the key doesn't exist."""
+ try:
+ vals = self._container[key.lower()]
+ except KeyError:
+ if default is self.__marker:
+ return []
+ return default
+ else:
+ return vals[1:]
+
+ # Backwards compatibility for httplib
+ getheaders = getlist
+ getallmatchingheaders = getlist
+ iget = getlist
+
+ # Backwards compatibility for http.cookiejar
+ get_all = getlist
+
+ def __repr__(self):
+ return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
+
+ def _copy_from(self, other):
+ for key in other:
+ val = other.getlist(key)
+ if isinstance(val, list):
+ # Don't need to convert tuples
+ val = list(val)
+ self._container[key.lower()] = [key] + val
+
+ def copy(self):
+ clone = type(self)()
+ clone._copy_from(self)
+ return clone
+
+ def iteritems(self):
+ """Iterate over all header lines, including duplicate ones."""
+ for key in self:
+ vals = self._container[key.lower()]
+ for val in vals[1:]:
+ yield vals[0], val
+
+ def itermerged(self):
+ """Iterate over all headers, merging duplicate ones together."""
+ for key in self:
+ val = self._container[key.lower()]
+ yield val[0], ", ".join(val[1:])
+
+ def items(self):
+ return list(self.iteritems())
+
+ @classmethod
+ def from_httplib(cls, message): # Python 2
+ """Read headers from a Python 2 httplib message object."""
+ # Python 2.7 does not expose a proper API for exporting multi-value
+ # headers efficiently. This function re-reads raw lines from the message
+ # object and extracts the multi-value headers properly.
+ obs_fold_continued_leaders = (" ", "\t")
+ headers = []
+
+ for line in message.headers:
+ if line.startswith(obs_fold_continued_leaders):
+ if not headers:
+ # We received a header line that starts with OWS as described
+ # in RFC-7230 S3.2.4. This indicates a multiline header, but
+ # there exists no previous header to which we can attach it.
+ raise InvalidHeader(
+ "Header continuation with no previous header: %s" % line
+ )
+ else:
+ key, value = headers[-1]
+ headers[-1] = (key, value + " " + line.strip())
+ continue
+
+ key, value = line.split(":", 1)
+ headers.append((key, value.strip()))
+
+ return cls(headers)
diff --git a/openpype/hosts/fusion/vendor/urllib3/_version.py b/openpype/hosts/fusion/vendor/urllib3/_version.py
new file mode 100644
index 0000000000..e8ebee957f
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/_version.py
@@ -0,0 +1,2 @@
+# This file is protected via CODEOWNERS
+__version__ = "1.26.6"
diff --git a/openpype/hosts/fusion/vendor/urllib3/connection.py b/openpype/hosts/fusion/vendor/urllib3/connection.py
new file mode 100644
index 0000000000..4c996659c8
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/connection.py
@@ -0,0 +1,539 @@
+from __future__ import absolute_import
+
+import datetime
+import logging
+import os
+import re
+import socket
+import warnings
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
+
+from .packages import six
+from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
+from .packages.six.moves.http_client import HTTPException # noqa: F401
+from .util.proxy import create_proxy_ssl_context
+
+try: # Compiled with SSL?
+ import ssl
+
+ BaseSSLError = ssl.SSLError
+except (ImportError, AttributeError): # Platform-specific: No SSL.
+ ssl = None
+
+ class BaseSSLError(BaseException):
+ pass
+
+
+try:
+ # Python 3: not a no-op, we're adding this to the namespace so it can be imported.
+ ConnectionError = ConnectionError
+except NameError:
+ # Python 2
+ class ConnectionError(Exception):
+ pass
+
+
+try: # Python 3:
+ # Not a no-op, we're adding this to the namespace so it can be imported.
+ BrokenPipeError = BrokenPipeError
+except NameError: # Python 2:
+
+ class BrokenPipeError(Exception):
+ pass
+
+
+from ._collections import HTTPHeaderDict # noqa (historical, removed in v2)
+from ._version import __version__
+from .exceptions import (
+ ConnectTimeoutError,
+ NewConnectionError,
+ SubjectAltNameWarning,
+ SystemTimeWarning,
+)
+from .packages.ssl_match_hostname import CertificateError, match_hostname
+from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
+from .util.ssl_ import (
+ assert_fingerprint,
+ create_urllib3_context,
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ ssl_wrap_socket,
+)
+
+log = logging.getLogger(__name__)
+
+port_by_scheme = {"http": 80, "https": 443}
+
+# When it comes time to update this value as a part of regular maintenance
+ # (i.e. test_recent_date is failing), update it to ~6 months before the current date.
+RECENT_DATE = datetime.date(2020, 7, 1)
+
+_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
+
+
+class HTTPConnection(_HTTPConnection, object):
+ """
+ Based on :class:`http.client.HTTPConnection` but provides an extra constructor
+ backwards-compatibility layer between older and newer Pythons.
+
+ Additional keyword parameters are used to configure attributes of the connection.
+ Accepted parameters include:
+
+ - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
+ - ``source_address``: Set the source address for the current connection.
+ - ``socket_options``: Set specific options on the underlying socket. If not specified, then
+ defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
+ Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
+
+ For example, if you wish to enable TCP Keep Alive in addition to the defaults,
+ you might pass:
+
+ .. code-block:: python
+
+ HTTPConnection.default_socket_options + [
+ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+ ]
+
+ Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
+ """
+
+ default_port = port_by_scheme["http"]
+
+ #: Disable Nagle's algorithm by default.
+ #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
+ default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
+
+ #: Whether this connection verifies the host's certificate.
+ is_verified = False
+
+ def __init__(self, *args, **kw):
+ if not six.PY2:
+ kw.pop("strict", None)
+
+ # Pre-set source_address.
+ self.source_address = kw.get("source_address")
+
+ #: The socket options provided by the user. If no options are
+ #: provided, we use the default options.
+ self.socket_options = kw.pop("socket_options", self.default_socket_options)
+
+ # Proxy options provided by the user.
+ self.proxy = kw.pop("proxy", None)
+ self.proxy_config = kw.pop("proxy_config", None)
+
+ _HTTPConnection.__init__(self, *args, **kw)
+
+ @property
+ def host(self):
+ """
+ Getter method to remove any trailing dots that indicate the hostname is an FQDN.
+
+ In general, SSL certificates don't include the trailing dot indicating a
+ fully-qualified domain name, and thus, they don't validate properly when
+ checked against a domain name that includes the dot. In addition, some
+ servers may not expect to receive the trailing dot when provided.
+
+ However, the hostname with the trailing dot is critical to DNS resolution; a
+ lookup with the trailing dot resolves only the appropriate FQDN,
+ whereas a lookup without it also searches the system's search domain
+ list. Thus, it's important to keep the original host around for use only in
+ those cases where it's appropriate (i.e., when doing DNS lookup to establish the
+ actual TCP connection across which we're going to send HTTP requests).
+ """
+ return self._dns_host.rstrip(".")
+
+ @host.setter
+ def host(self, value):
+ """
+ Setter for the `host` property.
+
+ We assume that only urllib3 uses the _dns_host attribute; httplib itself
+ only uses `host`, and it seems reasonable that other libraries follow suit.
+ """
+ self._dns_host = value
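+
+ # Illustrative behaviour (sketch, not upstream code): for an FQDN written
+ # with a trailing dot, the raw name is kept for DNS while the stripped
+ # name is used for certificate matching:
+ #
+ #     conn = HTTPConnection("example.com.")
+ #     conn._dns_host  # "example.com." (used for the socket connect)
+ #     conn.host       # "example.com"  (used e.g. for TLS verification)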
+
+ def _new_conn(self):
+ """Establish a socket connection and set nodelay settings on it.
+
+ :return: New socket connection.
+ """
+ extra_kw = {}
+ if self.source_address:
+ extra_kw["source_address"] = self.source_address
+
+ if self.socket_options:
+ extra_kw["socket_options"] = self.socket_options
+
+ try:
+ conn = connection.create_connection(
+ (self._dns_host, self.port), self.timeout, **extra_kw
+ )
+
+ except SocketTimeout:
+ raise ConnectTimeoutError(
+ self,
+ "Connection to %s timed out. (connect timeout=%s)"
+ % (self.host, self.timeout),
+ )
+
+ except SocketError as e:
+ raise NewConnectionError(
+ self, "Failed to establish a new connection: %s" % e
+ )
+
+ return conn
+
+ def _is_using_tunnel(self):
+ # Google App Engine's httplib does not define _tunnel_host
+ return getattr(self, "_tunnel_host", None)
+
+ def _prepare_conn(self, conn):
+ self.sock = conn
+ if self._is_using_tunnel():
+ # TODO: Fix tunnel so it doesn't depend on self.sock state.
+ self._tunnel()
+ # Mark this connection as not reusable
+ self.auto_open = 0
+
+ def connect(self):
+ conn = self._new_conn()
+ self._prepare_conn(conn)
+
+ def putrequest(self, method, url, *args, **kwargs):
+ """ """
+ # Empty docstring because the indentation of CPython's implementation
+ # is broken but we don't want this method in our documentation.
+ match = _CONTAINS_CONTROL_CHAR_RE.search(method)
+ if match:
+ raise ValueError(
+ "Method cannot contain non-token characters %r (found at least %r)"
+ % (method, match.group())
+ )
+
+ return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
+
+ def putheader(self, header, *values):
+ """ """
+ if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
+ _HTTPConnection.putheader(self, header, *values)
+ elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
+ raise ValueError(
+ "urllib3.util.SKIP_HEADER only supports '%s'"
+ % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
+ )
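+
+ # Illustrative usage (assumption based on the urllib3 1.26 docs, not
+ # upstream code): passing urllib3.util.SKIP_HEADER as a value suppresses
+ # one of the skippable auto-generated headers instead of sending it:
+ #
+ #     pool.urlopen("GET", "/", headers={"User-Agent": SKIP_HEADER})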
+
+ def request(self, method, url, body=None, headers=None):
+ if headers is None:
+ headers = {}
+ else:
+ # Avoid modifying the headers passed into .request()
+ headers = headers.copy()
+ if "user-agent" not in (six.ensure_str(k.lower()) for k in headers):
+ headers["User-Agent"] = _get_default_user_agent()
+ super(HTTPConnection, self).request(method, url, body=body, headers=headers)
+
+ def request_chunked(self, method, url, body=None, headers=None):
+ """
+ Alternative to the common request() method: sends the body
+ with chunked transfer encoding instead of as a single block.
+ """
+ headers = headers or {}
+ header_keys = set([six.ensure_str(k.lower()) for k in headers])
+ skip_accept_encoding = "accept-encoding" in header_keys
+ skip_host = "host" in header_keys
+ self.putrequest(
+ method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
+ )
+ if "user-agent" not in header_keys:
+ self.putheader("User-Agent", _get_default_user_agent())
+ for header, value in headers.items():
+ self.putheader(header, value)
+ if "transfer-encoding" not in header_keys:
+ self.putheader("Transfer-Encoding", "chunked")
+ self.endheaders()
+
+ if body is not None:
+ stringish_types = six.string_types + (bytes,)
+ if isinstance(body, stringish_types):
+ body = (body,)
+ for chunk in body:
+ if not chunk:
+ continue
+ if not isinstance(chunk, bytes):
+ chunk = chunk.encode("utf8")
+ len_str = hex(len(chunk))[2:]
+ to_send = bytearray(len_str.encode())
+ to_send += b"\r\n"
+ to_send += chunk
+ to_send += b"\r\n"
+ self.send(to_send)
+
+ # Sent after the if clause so the chunked body is always terminated
+ self.send(b"0\r\n\r\n")
+
+
+class HTTPSConnection(HTTPConnection):
+ """
+ Many of the parameters to this constructor are passed to the underlying SSL
+ socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
+ """
+
+ default_port = port_by_scheme["https"]
+
+ cert_reqs = None
+ ca_certs = None
+ ca_cert_dir = None
+ ca_cert_data = None
+ ssl_version = None
+ assert_fingerprint = None
+ tls_in_tls_required = False
+
+ def __init__(
+ self,
+ host,
+ port=None,
+ key_file=None,
+ cert_file=None,
+ key_password=None,
+ strict=None,
+ timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ ssl_context=None,
+ server_hostname=None,
+ **kw
+ ):
+
+ HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw)
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.key_password = key_password
+ self.ssl_context = ssl_context
+ self.server_hostname = server_hostname
+
+ # Required property for Google AppEngine 1.9.0 which otherwise causes
+ # HTTPS requests to go out as HTTP. (See Issue #356)
+ self._protocol = "https"
+
+ def set_cert(
+ self,
+ key_file=None,
+ cert_file=None,
+ cert_reqs=None,
+ key_password=None,
+ ca_certs=None,
+ assert_hostname=None,
+ assert_fingerprint=None,
+ ca_cert_dir=None,
+ ca_cert_data=None,
+ ):
+ """
+ This method should only be called once, before the connection is used.
+ """
+ # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
+ # have an SSLContext object in which case we'll use its verify_mode.
+ if cert_reqs is None:
+ if self.ssl_context is not None:
+ cert_reqs = self.ssl_context.verify_mode
+ else:
+ cert_reqs = resolve_cert_reqs(None)
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = cert_reqs
+ self.key_password = key_password
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
+ self.ca_cert_data = ca_cert_data
+
+ def connect(self):
+ # Add certificate verification
+ conn = self._new_conn()
+ hostname = self.host
+ tls_in_tls = False
+
+ if self._is_using_tunnel():
+ if self.tls_in_tls_required:
+ conn = self._connect_tls_proxy(hostname, conn)
+ tls_in_tls = True
+
+ self.sock = conn
+
+ # Calls self._set_hostport(), so self.host is
+ # self._tunnel_host below.
+ self._tunnel()
+ # Mark this connection as not reusable
+ self.auto_open = 0
+
+ # Override the host with the one we're requesting data from.
+ hostname = self._tunnel_host
+
+ server_hostname = hostname
+ if self.server_hostname is not None:
+ server_hostname = self.server_hostname
+
+ is_time_off = datetime.date.today() < RECENT_DATE
+ if is_time_off:
+ warnings.warn(
+ (
+ "System time is way off (before {0}). This will probably "
+ "lead to SSL verification errors"
+ ).format(RECENT_DATE),
+ SystemTimeWarning,
+ )
+
+ # Wrap socket using verification with the root certs in
+ # trusted_root_certs
+ default_ssl_context = False
+ if self.ssl_context is None:
+ default_ssl_context = True
+ self.ssl_context = create_urllib3_context(
+ ssl_version=resolve_ssl_version(self.ssl_version),
+ cert_reqs=resolve_cert_reqs(self.cert_reqs),
+ )
+
+ context = self.ssl_context
+ context.verify_mode = resolve_cert_reqs(self.cert_reqs)
+
+ # Try to load OS default certs if none are given.
+ # Works well on Windows (requires Python3.4+)
+ if (
+ not self.ca_certs
+ and not self.ca_cert_dir
+ and not self.ca_cert_data
+ and default_ssl_context
+ and hasattr(context, "load_default_certs")
+ ):
+ context.load_default_certs()
+
+ self.sock = ssl_wrap_socket(
+ sock=conn,
+ keyfile=self.key_file,
+ certfile=self.cert_file,
+ key_password=self.key_password,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ ca_cert_data=self.ca_cert_data,
+ server_hostname=server_hostname,
+ ssl_context=context,
+ tls_in_tls=tls_in_tls,
+ )
+
+ # If we're using all defaults and the connection
+ # is TLSv1 or TLSv1.1 we throw a DeprecationWarning
+ # for the host.
+ if (
+ default_ssl_context
+ and self.ssl_version is None
+ and hasattr(self.sock, "version")
+ and self.sock.version() in {"TLSv1", "TLSv1.1"}
+ ):
+ warnings.warn(
+ "Negotiating TLSv1/TLSv1.1 by default is deprecated "
+ "and will be disabled in urllib3 v2.0.0. Connecting to "
+ "'%s' with '%s' can be enabled by explicitly opting-in "
+ "with 'ssl_version'" % (self.host, self.sock.version()),
+ DeprecationWarning,
+ )
+
+ if self.assert_fingerprint:
+ assert_fingerprint(
+ self.sock.getpeercert(binary_form=True), self.assert_fingerprint
+ )
+ elif (
+ context.verify_mode != ssl.CERT_NONE
+ and not getattr(context, "check_hostname", False)
+ and self.assert_hostname is not False
+ ):
+ # While urllib3 attempts to always turn off hostname matching from
+ # the TLS library, this cannot always be done. So we check whether
+ # the TLS library still thinks it's matching hostnames.
+ cert = self.sock.getpeercert()
+ if not cert.get("subjectAltName", ()):
+ warnings.warn(
+ (
+ "Certificate for {0} has no `subjectAltName`, falling back to check for a "
+ "`commonName` for now. This feature is being removed by major browsers and "
+ "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
+ "for details.)".format(hostname)
+ ),
+ SubjectAltNameWarning,
+ )
+ _match_hostname(cert, self.assert_hostname or server_hostname)
+
+ self.is_verified = (
+ context.verify_mode == ssl.CERT_REQUIRED
+ or self.assert_fingerprint is not None
+ )
+
+ def _connect_tls_proxy(self, hostname, conn):
+ """
+ Establish a TLS connection to the proxy using the provided SSL context.
+ """
+ proxy_config = self.proxy_config
+ ssl_context = proxy_config.ssl_context
+ if ssl_context:
+ # If the user provided a proxy context, we assume CA and client
+ # certificates have already been set
+ return ssl_wrap_socket(
+ sock=conn,
+ server_hostname=hostname,
+ ssl_context=ssl_context,
+ )
+
+ ssl_context = create_proxy_ssl_context(
+ self.ssl_version,
+ self.cert_reqs,
+ self.ca_certs,
+ self.ca_cert_dir,
+ self.ca_cert_data,
+ )
+ # By default urllib3's SSLContext disables `check_hostname` and uses
+ # a custom check. For proxies we're good with relying on the default
+ # verification.
+ ssl_context.check_hostname = True
+
+ # If no cert was provided, use only the default options for server
+ # certificate validation
+ return ssl_wrap_socket(
+ sock=conn,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ ca_cert_data=self.ca_cert_data,
+ server_hostname=hostname,
+ ssl_context=ssl_context,
+ )
+
+
+def _match_hostname(cert, asserted_hostname):
+ try:
+ match_hostname(cert, asserted_hostname)
+ except CertificateError as e:
+ log.warning(
+ "Certificate did not match expected hostname: %s. Certificate: %s",
+ asserted_hostname,
+ cert,
+ )
+ # Add cert to exception and reraise so client code can inspect
+ # the cert when catching the exception, if they want to
+ e._peer_cert = cert
+ raise
+
+
+def _get_default_user_agent():
+ return "python-urllib3/%s" % __version__
+
+
+class DummyConnection(object):
+ """Used to detect a failed ConnectionCls import."""
+
+ pass
+
+
+if not ssl:
+ HTTPSConnection = DummyConnection # noqa: F811
+
+
+VerifiedHTTPSConnection = HTTPSConnection
diff --git a/openpype/hosts/fusion/vendor/urllib3/connectionpool.py b/openpype/hosts/fusion/vendor/urllib3/connectionpool.py
new file mode 100644
index 0000000000..459bbe095b
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/connectionpool.py
@@ -0,0 +1,1067 @@
+from __future__ import absolute_import
+
+import errno
+import logging
+import socket
+import sys
+import warnings
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
+
+from .connection import (
+ BaseSSLError,
+ BrokenPipeError,
+ DummyConnection,
+ HTTPConnection,
+ HTTPException,
+ HTTPSConnection,
+ VerifiedHTTPSConnection,
+ port_by_scheme,
+)
+from .exceptions import (
+ ClosedPoolError,
+ EmptyPoolError,
+ HeaderParsingError,
+ HostChangedError,
+ InsecureRequestWarning,
+ LocationValueError,
+ MaxRetryError,
+ NewConnectionError,
+ ProtocolError,
+ ProxyError,
+ ReadTimeoutError,
+ SSLError,
+ TimeoutError,
+)
+from .packages import six
+from .packages.six.moves import queue
+from .packages.ssl_match_hostname import CertificateError
+from .request import RequestMethods
+from .response import HTTPResponse
+from .util.connection import is_connection_dropped
+from .util.proxy import connection_requires_http_tunnel
+from .util.queue import LifoQueue
+from .util.request import set_file_position
+from .util.response import assert_header_parsing
+from .util.retry import Retry
+from .util.timeout import Timeout
+from .util.url import Url, _encode_target
+from .util.url import _normalize_host as normalize_host
+from .util.url import get_host, parse_url
+
+xrange = six.moves.xrange
+
+log = logging.getLogger(__name__)
+
+_Default = object()
+
+
+# Pool objects
+class ConnectionPool(object):
+ """
+ Base class for all connection pools, such as
+ :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
+
+ .. note::
+ ConnectionPool.urlopen() does not normalize or percent-encode target URIs,
+ which is useful if your target server doesn't support percent-encoded
+ target URIs.
+ """
+
+ scheme = None
+ QueueCls = LifoQueue
+
+ def __init__(self, host, port=None):
+ if not host:
+ raise LocationValueError("No host specified.")
+
+ self.host = _normalize_host(host, scheme=self.scheme)
+ self._proxy_host = host.lower()
+ self.port = port
+
+ def __str__(self):
+ return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.close()
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def close(self):
+ """
+ Close all pooled connections and disable the pool.
+ """
+ pass
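+
+ # Illustrative usage (sketch, not upstream code): pools are context
+ # managers, so the pool is closed when the block exits:
+ #
+ #     with HTTPConnectionPool("example.com") as pool:
+ #         ...
+ #     # close() has been called here via __exit__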
+
+
+# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
+_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}
+
+
+class HTTPConnectionPool(ConnectionPool, RequestMethods):
+ """
+ Thread-safe connection pool for one host.
+
+ :param host:
+ Host used for this HTTP Connection (e.g. "localhost"), passed into
+ :class:`http.client.HTTPConnection`.
+
+ :param port:
+ Port used for this HTTP Connection (None is equivalent to 80), passed
+ into :class:`http.client.HTTPConnection`.
+
+ :param strict:
+ Causes BadStatusLine to be raised if the status line can't be parsed
+ as a valid HTTP/1.0 or 1.1 status line, passed into
+ :class:`http.client.HTTPConnection`.
+
+ .. note::
+ Only works in Python 2. This parameter is ignored in Python 3.
+
+ :param timeout:
+ Socket timeout in seconds for each individual connection. This can
+ be a float or integer, which sets the timeout for the HTTP request,
+ or an instance of :class:`urllib3.util.Timeout` which gives you more
+ fine-grained control over request timeouts. Once parsed by the
+ constructor, this is always a `urllib3.util.Timeout` object.
+
+ :param maxsize:
+ Number of connections to save that can be reused. More than 1 is useful
+ in multithreaded situations. If ``block`` is set to False, more
+ connections will be created but they will not be saved once they've
+ been used.
+
+ :param block:
+ If set to True, no more than ``maxsize`` connections will be used at
+ a time. When no free connections are available, the call will block
+ until a connection has been released. This is a useful side effect for
+ particular multithreaded situations where one does not want to use more
+ than maxsize connections per host to prevent flooding.
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+
+ :param retries:
+ Retry configuration to use by default with requests in this pool.
+
+ :param _proxy:
+ Parsed proxy URL, should not be used directly, instead, see
+ :class:`urllib3.ProxyManager`
+
+ :param _proxy_headers:
+ A dictionary with proxy headers, should not be used directly,
+ instead, see :class:`urllib3.ProxyManager`
+
+ :param \\**conn_kw:
+ Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
+ :class:`urllib3.connection.HTTPSConnection` instances.
+ """
+
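+ # Illustrative construction (sketch, not upstream code) combining the
+ # parameters documented above; Timeout and Retry give fine-grained control:
+ #
+ #     pool = HTTPConnectionPool(
+ #         "example.com", maxsize=4, block=True,
+ #         timeout=Timeout(connect=2.0, read=7.0), retries=Retry(3),
+ #     )
+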
+ scheme = "http"
+ ConnectionCls = HTTPConnection
+ ResponseCls = HTTPResponse
+
+ def __init__(
+ self,
+ host,
+ port=None,
+ strict=False,
+ timeout=Timeout.DEFAULT_TIMEOUT,
+ maxsize=1,
+ block=False,
+ headers=None,
+ retries=None,
+ _proxy=None,
+ _proxy_headers=None,
+ _proxy_config=None,
+ **conn_kw
+ ):
+ ConnectionPool.__init__(self, host, port)
+ RequestMethods.__init__(self, headers)
+
+ self.strict = strict
+
+ if not isinstance(timeout, Timeout):
+ timeout = Timeout.from_float(timeout)
+
+ if retries is None:
+ retries = Retry.DEFAULT
+
+ self.timeout = timeout
+ self.retries = retries
+
+ self.pool = self.QueueCls(maxsize)
+ self.block = block
+
+ self.proxy = _proxy
+ self.proxy_headers = _proxy_headers or {}
+ self.proxy_config = _proxy_config
+
+ # Fill the queue up so that doing get() on it will block properly
+ for _ in xrange(maxsize):
+ self.pool.put(None)
+
+ # These are mostly for testing and debugging purposes.
+ self.num_connections = 0
+ self.num_requests = 0
+ self.conn_kw = conn_kw
+
+ if self.proxy:
+ # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
+ # We cannot know if the user has added default socket options, so we cannot replace the
+ # list.
+ self.conn_kw.setdefault("socket_options", [])
+
+ self.conn_kw["proxy"] = self.proxy
+ self.conn_kw["proxy_config"] = self.proxy_config
+
+ def _new_conn(self):
+ """
+ Return a fresh :class:`HTTPConnection`.
+ """
+ self.num_connections += 1
+ log.debug(
+ "Starting new HTTP connection (%d): %s:%s",
+ self.num_connections,
+ self.host,
+ self.port or "80",
+ )
+
+ conn = self.ConnectionCls(
+ host=self.host,
+ port=self.port,
+ timeout=self.timeout.connect_timeout,
+ strict=self.strict,
+ **self.conn_kw
+ )
+ return conn
+
+ def _get_conn(self, timeout=None):
+ """
+ Get a connection. Will return a pooled connection if one is available.
+
+ If no connections are available and :prop:`.block` is ``False``, then a
+ fresh connection is returned.
+
+ :param timeout:
+ Seconds to wait before giving up and raising
+ :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
+ :prop:`.block` is ``True``.
+ """
+ conn = None
+ try:
+ conn = self.pool.get(block=self.block, timeout=timeout)
+
+ except AttributeError: # self.pool is None
+ raise ClosedPoolError(self, "Pool is closed.")
+
+ except queue.Empty:
+ if self.block:
+ raise EmptyPoolError(
+ self,
+ "Pool reached maximum size and no more connections are allowed.",
+ )
+ pass # Oh well, we'll create a new connection then
+
+ # If this is a persistent connection, check if it got disconnected
+ if conn and is_connection_dropped(conn):
+ log.debug("Resetting dropped connection: %s", self.host)
+ conn.close()
+ if getattr(conn, "auto_open", 1) == 0:
+ # This is a proxied connection that has been mutated by
+ # http.client._tunnel() and cannot be reused (since it would
+ # attempt to bypass the proxy)
+ conn = None
+
+ return conn or self._new_conn()
+
+ def _put_conn(self, conn):
+ """
+ Put a connection back into the pool.
+
+ :param conn:
+ Connection object for the current host and port as returned by
+ :meth:`._new_conn` or :meth:`._get_conn`.
+
+ If the pool is already full, the connection is closed and discarded
+ because we exceeded maxsize. If connections are discarded frequently,
+ then maxsize should be increased.
+
+ If the pool is closed, then the connection will be closed and discarded.
+ """
+ try:
+ self.pool.put(conn, block=False)
+ return # Everything is dandy, done.
+ except AttributeError:
+ # self.pool is None.
+ pass
+ except queue.Full:
+ # This should never happen if self.block == True
+ log.warning("Connection pool is full, discarding connection: %s", self.host)
+
+ # Connection never got put back into the pool, close it.
+ if conn:
+ conn.close()
+
+ def _validate_conn(self, conn):
+ """
+ Called right before a request is made, after the socket is created.
+ """
+ pass
+
+ def _prepare_proxy(self, conn):
+ # Nothing to do for HTTP connections.
+ pass
+
+ def _get_timeout(self, timeout):
+ """Helper that always returns a :class:`urllib3.util.Timeout`"""
+ if timeout is _Default:
+ return self.timeout.clone()
+
+ if isinstance(timeout, Timeout):
+ return timeout.clone()
+ else:
+ # User passed us an int/float. This is for backwards compatibility,
+ # can be removed later
+ return Timeout.from_float(timeout)
+
+ def _raise_timeout(self, err, url, timeout_value):
+ """Is the error actually a timeout? Will raise a ReadTimeout or pass"""
+
+ if isinstance(err, SocketTimeout):
+ raise ReadTimeoutError(
+ self, url, "Read timed out. (read timeout=%s)" % timeout_value
+ )
+
+ # See the comment in _make_request() about EAGAIN in Python 3. In
+ # Python 2 we have to specifically catch it and throw the timeout error.
+ if hasattr(err, "errno") and err.errno in _blocking_errnos:
+ raise ReadTimeoutError(
+ self, url, "Read timed out. (read timeout=%s)" % timeout_value
+ )
+
+ # Catch possible read timeouts thrown as SSL errors. If not the
+ # case, rethrow the original. We need to do this because of:
+ # http://bugs.python.org/issue10272
+ if "timed out" in str(err) or "did not complete (read)" in str(
+ err
+ ): # Python < 2.7.4
+ raise ReadTimeoutError(
+ self, url, "Read timed out. (read timeout=%s)" % timeout_value
+ )
+
+ def _make_request(
+ self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
+ ):
+ """
+ Perform a request on a given urllib connection object taken from our
+ pool.
+
+ :param conn:
+ a connection from one of our connection pools
+
+ :param timeout:
+ Socket timeout in seconds for the request. This can be a
+ float or integer, which will set the same timeout value for
+ the socket connect and the socket read, or an instance of
+ :class:`urllib3.util.Timeout`, which gives you more fine-grained
+ control over your timeouts.
+ """
+ self.num_requests += 1
+
+ timeout_obj = self._get_timeout(timeout)
+ timeout_obj.start_connect()
+ conn.timeout = timeout_obj.connect_timeout
+
+ # Trigger any extra validation we need to do.
+ try:
+ self._validate_conn(conn)
+ except (SocketTimeout, BaseSSLError) as e:
+ # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
+ self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
+ raise
+
+ # conn.request() calls http.client.*.request, not the method in
+ # urllib3.request. It also calls makefile (recv) on the socket.
+ try:
+ if chunked:
+ conn.request_chunked(method, url, **httplib_request_kw)
+ else:
+ conn.request(method, url, **httplib_request_kw)
+
+ # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
+ # legitimately able to close the connection after sending a valid response.
+ # With this behaviour, the received response is still readable.
+ except BrokenPipeError:
+ # Python 3
+ pass
+ except IOError as e:
+ # Python 2 and macOS/Linux
+ # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
+ # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
+ if e.errno not in {
+ errno.EPIPE,
+ errno.ESHUTDOWN,
+ errno.EPROTOTYPE,
+ }:
+ raise
+
+ # Reset the timeout for the recv() on the socket
+ read_timeout = timeout_obj.read_timeout
+
+ # App Engine doesn't have a sock attr
+ if getattr(conn, "sock", None):
+ # In Python 3 socket.py will catch EAGAIN and return None when you
+ # try and read into the file pointer created by http.client, which
+ # instead raises a BadStatusLine exception. Instead of catching
+ # the exception and assuming all BadStatusLine exceptions are read
+ # timeouts, check for a zero timeout before making the request.
+ if read_timeout == 0:
+ raise ReadTimeoutError(
+ self, url, "Read timed out. (read timeout=%s)" % read_timeout
+ )
+ if read_timeout is Timeout.DEFAULT_TIMEOUT:
+ conn.sock.settimeout(socket.getdefaulttimeout())
+ else: # None or a value
+ conn.sock.settimeout(read_timeout)
+
+ # Receive the response from the server
+ try:
+ try:
+ # Python 2.7, use buffering of HTTP responses
+ httplib_response = conn.getresponse(buffering=True)
+ except TypeError:
+ # Python 3
+ try:
+ httplib_response = conn.getresponse()
+ except BaseException as e:
+ # Remove the TypeError from the exception chain in
+ # Python 3 (including for exceptions like SystemExit).
+ # Otherwise it looks like a bug in the code.
+ six.raise_from(e, None)
+ except (SocketTimeout, BaseSSLError, SocketError) as e:
+ self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
+ raise
+
+ # AppEngine doesn't have a version attr.
+ http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
+ log.debug(
+ '%s://%s:%s "%s %s %s" %s %s',
+ self.scheme,
+ self.host,
+ self.port,
+ method,
+ url,
+ http_version,
+ httplib_response.status,
+ httplib_response.length,
+ )
+
+ try:
+ assert_header_parsing(httplib_response.msg)
+ except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3
+ log.warning(
+ "Failed to parse headers (url=%s): %s",
+ self._absolute_url(url),
+ hpe,
+ exc_info=True,
+ )
+
+ return httplib_response
+
+ def _absolute_url(self, path):
+ return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
+
+ def close(self):
+ """
+ Close all pooled connections and disable the pool.
+ """
+ if self.pool is None:
+ return
+ # Disable access to the pool
+ old_pool, self.pool = self.pool, None
+
+ try:
+ while True:
+ conn = old_pool.get(block=False)
+ if conn:
+ conn.close()
+
+ except queue.Empty:
+ pass # Done.
+
+ def is_same_host(self, url):
+ """
+ Check if the given ``url`` is a member of the same host as this
+ connection pool.
+ """
+ if url.startswith("/"):
+ return True
+
+ # TODO: Add optional support for socket.gethostbyname checking.
+ scheme, host, port = get_host(url)
+ if host is not None:
+ host = _normalize_host(host, scheme=scheme)
+
+ # Use explicit default port for comparison when none is given
+ if self.port and not port:
+ port = port_by_scheme.get(scheme)
+ elif not self.port and port == port_by_scheme.get(scheme):
+ port = None
+
+ return (scheme, host, port) == (self.scheme, self.host, self.port)
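+
+ # Illustrative behaviour (sketch, not upstream code): explicit and implied
+ # default ports compare equal, e.g. for a pool built on "example.com":
+ #
+ #     pool.is_same_host("http://example.com/index.html")     # True
+ #     pool.is_same_host("http://example.com:80/index.html")  # True
+ #     pool.is_same_host("http://other.example/")             # False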
+
+ def urlopen(
+ self,
+ method,
+ url,
+ body=None,
+ headers=None,
+ retries=None,
+ redirect=True,
+ assert_same_host=True,
+ timeout=_Default,
+ pool_timeout=None,
+ release_conn=None,
+ chunked=False,
+ body_pos=None,
+ **response_kw
+ ):
+ """
+ Get a connection from the pool and perform an HTTP request. This is the
+ lowest level call for making a request, so you'll need to specify all
+ the raw details.
+
+ .. note::
+
+ More commonly, it's appropriate to use a convenience method provided
+ by :class:`.RequestMethods`, such as :meth:`request`.
+
+ .. note::
+
+ `release_conn` will only behave as expected if
+ `preload_content=False` because we want to make
+ `preload_content=False` the default behaviour someday soon without
+ breaking backwards compatibility.
+
+ :param method:
+ HTTP request method (such as GET, POST, PUT, etc.)
+
+ :param url:
+ The URL to perform the request on.
+
+ :param body:
+ Data to send in the request body, either :class:`str`, :class:`bytes`,
+ an iterable of :class:`str`/:class:`bytes`, or a file-like object.
+
+ :param headers:
+ Dictionary of custom headers to send, such as User-Agent,
+ If-None-Match, etc. If None, pool headers are used. If provided,
+ these headers completely replace any pool-specific headers.
+
+ :param retries:
+ Configure the number of retries to allow before raising a
+ :class:`~urllib3.exceptions.MaxRetryError` exception.
+
+ Pass ``None`` to retry until you receive a response. Pass a
+ :class:`~urllib3.util.retry.Retry` object for fine-grained control
+ over different types of retries.
+ Pass an integer number to retry connection errors that many times,
+ but no other types of errors. Pass zero to never retry.
+
+ If ``False``, then retries are disabled and any exception is raised
+ immediately. Also, instead of raising a MaxRetryError on redirects,
+ the redirect response will be returned.
+
+ :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
+
+ :param redirect:
+ If True, automatically handle redirects (status codes 301, 302,
+ 303, 307, 308). Each redirect counts as a retry. Disabling retries
+ will disable redirect, too.
+
+ :param assert_same_host:
+ If ``True``, will make sure that the host of the pool requests is
+ consistent, else will raise HostChangedError. When ``False``, you can
+ use the pool on an HTTP proxy and request foreign hosts.
+
+ :param timeout:
+ If specified, overrides the default timeout for this one
+ request. It may be a float (in seconds) or an instance of
+ :class:`urllib3.util.Timeout`.
+
+ :param pool_timeout:
+ If set and the pool is set to block=True, then this method will
+ block for ``pool_timeout`` seconds and raise EmptyPoolError if no
+ connection is available within the time period.
+
+ :param release_conn:
+ If False, then the urlopen call will not release the connection
+ back into the pool once a response is received (but will release if
+ you read the entire contents of the response such as when
+ `preload_content=True`). This is useful if you're not preloading
+ the response's content immediately. You will need to call
+ ``r.release_conn()`` on the response ``r`` to return the connection
+ back into the pool. If None, it takes the value of
+ ``response_kw.get('preload_content', True)``.
+
+ :param chunked:
+ If True, urllib3 will send the body using chunked transfer
+ encoding. Otherwise, urllib3 will send the body using the standard
+ content-length form. Defaults to False.
+
+ :param int body_pos:
+ Position to seek to in file-like body in the event of a retry or
+ redirect. Typically this won't need to be set because urllib3 will
+ auto-populate the value when needed.
+
+ :param \\**response_kw:
+ Additional parameters are passed to
+ :meth:`urllib3.response.HTTPResponse.from_httplib`
+ """
+
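+ # Illustrative usage (sketch, not upstream code) of the release_conn note
+ # above: with preload_content=False the caller streams the body and then
+ # hands the socket back to the pool explicitly:
+ #
+ #     resp = pool.urlopen("GET", "/", preload_content=False)
+ #     for chunk in resp.stream(1024):
+ #         handle(chunk)       # `handle` is a hypothetical consumer
+ #     resp.release_conn()
+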
+ parsed_url = parse_url(url)
+ destination_scheme = parsed_url.scheme
+
+ if headers is None:
+ headers = self.headers
+
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
+
+ if release_conn is None:
+ release_conn = response_kw.get("preload_content", True)
+
+ # Check host
+ if assert_same_host and not self.is_same_host(url):
+ raise HostChangedError(self, url, retries)
+
+ # Ensure that the URL we're connecting to is properly encoded
+ if url.startswith("/"):
+ url = six.ensure_str(_encode_target(url))
+ else:
+ url = six.ensure_str(parsed_url.url)
+
+ conn = None
+
+ # Track whether `conn` needs to be released before
+ # returning/raising/recursing. Update this variable if necessary, and
+ # leave `release_conn` constant throughout the function. That way, if
+ # the function recurses, the original value of `release_conn` will be
+ # passed down into the recursive call, and its value will be respected.
+ #
+ # See issue #651 [1] for details.
+ #
+ # [1] <https://github.com/urllib3/urllib3/issues/651>
+ release_this_conn = release_conn
+
+ http_tunnel_required = connection_requires_http_tunnel(
+ self.proxy, self.proxy_config, destination_scheme
+ )
+
+ # Merge the proxy headers. Only done when not using HTTP CONNECT. We
+ # have to copy the headers dict so we can safely change it without those
+ # changes being reflected in anyone else's copy.
+ if not http_tunnel_required:
+ headers = headers.copy()
+ headers.update(self.proxy_headers)
+
+ # Must keep the exception bound to a separate variable or else Python 3
+ # complains about UnboundLocalError.
+ err = None
+
+ # Keep track of whether we cleanly exited the except block. This
+ # ensures we do proper cleanup in finally.
+ clean_exit = False
+
+ # Rewind body position, if needed. Record current position
+ # for future rewinds in the event of a redirect/retry.
+ body_pos = set_file_position(body, body_pos)
+
+ try:
+ # Request a connection from the queue.
+ timeout_obj = self._get_timeout(timeout)
+ conn = self._get_conn(timeout=pool_timeout)
+
+ conn.timeout = timeout_obj.connect_timeout
+
+ is_new_proxy_conn = self.proxy is not None and not getattr(
+ conn, "sock", None
+ )
+ if is_new_proxy_conn and http_tunnel_required:
+ self._prepare_proxy(conn)
+
+ # Make the request on the httplib connection object.
+ httplib_response = self._make_request(
+ conn,
+ method,
+ url,
+ timeout=timeout_obj,
+ body=body,
+ headers=headers,
+ chunked=chunked,
+ )
+
+ # If we're going to release the connection in ``finally:``, then
+ # the response doesn't need to know about the connection. Otherwise
+ # it will also try to release it and we'll have a double-release
+ # mess.
+ response_conn = conn if not release_conn else None
+
+ # Pass method to Response for length checking
+ response_kw["request_method"] = method
+
+ # Import httplib's response into our own wrapper object
+ response = self.ResponseCls.from_httplib(
+ httplib_response,
+ pool=self,
+ connection=response_conn,
+ retries=retries,
+ **response_kw
+ )
+
+ # Everything went great!
+ clean_exit = True
+
+ except EmptyPoolError:
+ # Didn't get a connection from the pool, no need to clean up
+ clean_exit = True
+ release_this_conn = False
+ raise
+
+ except (
+ TimeoutError,
+ HTTPException,
+ SocketError,
+ ProtocolError,
+ BaseSSLError,
+ SSLError,
+ CertificateError,
+ ) as e:
+ # Discard the connection for these exceptions. It will be
+ # replaced during the next _get_conn() call.
+ clean_exit = False
+ if isinstance(e, (BaseSSLError, CertificateError)):
+ e = SSLError(e)
+ elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
+ e = ProxyError("Cannot connect to proxy.", e)
+ elif isinstance(e, (SocketError, HTTPException)):
+ e = ProtocolError("Connection aborted.", e)
+
+ retries = retries.increment(
+ method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
+ )
+ retries.sleep()
+
+ # Keep track of the error for the retry warning.
+ err = e
+
+ finally:
+ if not clean_exit:
+ # We hit some kind of exception, handled or otherwise. We need
+ # to throw the connection away unless explicitly told not to.
+ # Close the connection, set the variable to None, and make sure
+ # we put the None back in the pool to avoid leaking it.
+ conn = conn and conn.close()
+ release_this_conn = True
+
+ if release_this_conn:
+ # Put the connection back to be reused. If the connection is
+ # expired then it will be None, which will get replaced with a
+ # fresh connection during _get_conn.
+ self._put_conn(conn)
+
+ if not conn:
+ # Try again
+ log.warning(
+ "Retrying (%r) after connection broken by '%r': %s", retries, err, url
+ )
+ return self.urlopen(
+ method,
+ url,
+ body,
+ headers,
+ retries,
+ redirect,
+ assert_same_host,
+ timeout=timeout,
+ pool_timeout=pool_timeout,
+ release_conn=release_conn,
+ chunked=chunked,
+ body_pos=body_pos,
+ **response_kw
+ )
+
+ # Handle redirect?
+ redirect_location = redirect and response.get_redirect_location()
+ if redirect_location:
+ if response.status == 303:
+ method = "GET"
+
+ try:
+ retries = retries.increment(method, url, response=response, _pool=self)
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ response.drain_conn()
+ raise
+ return response
+
+ response.drain_conn()
+ retries.sleep_for_retry(response)
+ log.debug("Redirecting %s -> %s", url, redirect_location)
+ return self.urlopen(
+ method,
+ redirect_location,
+ body,
+ headers,
+ retries=retries,
+ redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout,
+ pool_timeout=pool_timeout,
+ release_conn=release_conn,
+ chunked=chunked,
+ body_pos=body_pos,
+ **response_kw
+ )
+
+ # Check if we should retry the HTTP response.
+ has_retry_after = bool(response.getheader("Retry-After"))
+ if retries.is_retry(method, response.status, has_retry_after):
+ try:
+ retries = retries.increment(method, url, response=response, _pool=self)
+ except MaxRetryError:
+ if retries.raise_on_status:
+ response.drain_conn()
+ raise
+ return response
+
+ response.drain_conn()
+ retries.sleep(response)
+ log.debug("Retry: %s", url)
+ return self.urlopen(
+ method,
+ url,
+ body,
+ headers,
+ retries=retries,
+ redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout,
+ pool_timeout=pool_timeout,
+ release_conn=release_conn,
+ chunked=chunked,
+ body_pos=body_pos,
+ **response_kw
+ )
+
+ return response
+
+
+class HTTPSConnectionPool(HTTPConnectionPool):
+ """
+ Same as :class:`.HTTPConnectionPool`, but HTTPS.
+
+ :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
+ ``assert_hostname`` and ``host`` in this order to verify connections.
+ If ``assert_hostname`` is False, no verification is done.
+
+ The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
+ ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
+ is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
+ the connection socket into an SSL socket.
+ """
+
+ scheme = "https"
+ ConnectionCls = HTTPSConnection
+
+ def __init__(
+ self,
+ host,
+ port=None,
+ strict=False,
+ timeout=Timeout.DEFAULT_TIMEOUT,
+ maxsize=1,
+ block=False,
+ headers=None,
+ retries=None,
+ _proxy=None,
+ _proxy_headers=None,
+ key_file=None,
+ cert_file=None,
+ cert_reqs=None,
+ key_password=None,
+ ca_certs=None,
+ ssl_version=None,
+ assert_hostname=None,
+ assert_fingerprint=None,
+ ca_cert_dir=None,
+ **conn_kw
+ ):
+
+ HTTPConnectionPool.__init__(
+ self,
+ host,
+ port,
+ strict,
+ timeout,
+ maxsize,
+ block,
+ headers,
+ retries,
+ _proxy,
+ _proxy_headers,
+ **conn_kw
+ )
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = cert_reqs
+ self.key_password = key_password
+ self.ca_certs = ca_certs
+ self.ca_cert_dir = ca_cert_dir
+ self.ssl_version = ssl_version
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+
+ def _prepare_conn(self, conn):
+ """
+ Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
+ and establish the tunnel if proxy is used.
+ """
+
+ if isinstance(conn, VerifiedHTTPSConnection):
+ conn.set_cert(
+ key_file=self.key_file,
+ key_password=self.key_password,
+ cert_file=self.cert_file,
+ cert_reqs=self.cert_reqs,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ assert_hostname=self.assert_hostname,
+ assert_fingerprint=self.assert_fingerprint,
+ )
+ conn.ssl_version = self.ssl_version
+ return conn
+
+ def _prepare_proxy(self, conn):
+ """
+ Establishes a tunnel connection through HTTP CONNECT.
+
+ The tunnel connection is established early because otherwise httplib would
+ improperly set the Host: header to the proxy's IP:port.
+ """
+
+ conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
+
+ if self.proxy.scheme == "https":
+ conn.tls_in_tls_required = True
+
+ conn.connect()
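+
+ # Illustrative note (assumption, not upstream code): this tunnel path is
+ # normally reached through urllib3.ProxyManager, e.g.
+ #
+ #     proxy = ProxyManager("http://proxy.example:3128")
+ #     proxy.request("GET", "https://example.com/")  # CONNECT, then TLS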
+
+ def _new_conn(self):
+ """
+ Return a fresh :class:`http.client.HTTPSConnection`.
+ """
+ self.num_connections += 1
+ log.debug(
+ "Starting new HTTPS connection (%d): %s:%s",
+ self.num_connections,
+ self.host,
+ self.port or "443",
+ )
+
+ if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
+ raise SSLError(
+ "Can't connect to HTTPS URL because the SSL module is not available."
+ )
+
+ actual_host = self.host
+ actual_port = self.port
+ if self.proxy is not None:
+ actual_host = self.proxy.host
+ actual_port = self.proxy.port
+
+ conn = self.ConnectionCls(
+ host=actual_host,
+ port=actual_port,
+ timeout=self.timeout.connect_timeout,
+ strict=self.strict,
+ cert_file=self.cert_file,
+ key_file=self.key_file,
+ key_password=self.key_password,
+ **self.conn_kw
+ )
+
+ return self._prepare_conn(conn)
+
+ def _validate_conn(self, conn):
+ """
+ Called right before a request is made, after the socket is created.
+ """
+ super(HTTPSConnectionPool, self)._validate_conn(conn)
+
+ # Force connect early to allow us to validate the connection.
+ if not getattr(conn, "sock", None): # AppEngine might not have `.sock`
+ conn.connect()
+
+ if not conn.is_verified:
+ warnings.warn(
+ (
+ "Unverified HTTPS request is being made to host '%s'. "
+ "Adding certificate verification is strongly advised. See: "
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
+ "#ssl-warnings" % conn.host
+ ),
+ InsecureRequestWarning,
+ )
+
+
+def connection_from_url(url, **kw):
+ """
+ Given a url, return a :class:`.ConnectionPool` instance of its host.
+
+ This is a shortcut for not having to parse out the scheme, host, and port
+ of the url before creating a :class:`.ConnectionPool` instance.
+
+ :param url:
+ Absolute URL string that must include the scheme. Port is optional.
+
+ :param \\**kw:
+ Passes additional parameters to the constructor of the appropriate
+ :class:`.ConnectionPool`. Useful for specifying things like
+ timeout, maxsize, headers, etc.
+
+ Example::
+
+ >>> conn = connection_from_url('http://google.com/')
+ >>> r = conn.request('GET', '/')
+ """
+ scheme, host, port = get_host(url)
+ port = port or port_by_scheme.get(scheme, 80)
+ if scheme == "https":
+ return HTTPSConnectionPool(host, port=port, **kw)
+ else:
+ return HTTPConnectionPool(host, port=port, **kw)
+
+
+def _normalize_host(host, scheme):
+ """
+ Normalize hosts for comparisons and use with sockets.
+ """
+
+ host = normalize_host(host, scheme)
+
+ # httplib doesn't like it when we include brackets in IPv6 addresses
+ # Specifically, if we include brackets but also pass the port then
+ # httplib crazily doubles up the square brackets on the Host header.
+ # Instead, we need to make sure we never pass ``None`` as the port.
+ # However, for backward compatibility reasons we can't actually
+ # *assert* that. See http://bugs.python.org/issue28539
+ if host.startswith("[") and host.endswith("]"):
+ host = host[1:-1]
+ return host
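+
+ # Illustrative behaviour (sketch, not upstream code): bracketed IPv6
+ # literals lose their brackets before being handed to httplib:
+ #
+ #     _normalize_host("[::1]", scheme="http")  # -> "::1"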
diff --git a/openpype/hosts/fusion/vendor/urllib3/contrib/__init__.py b/openpype/hosts/fusion/vendor/urllib3/contrib/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/openpype/hosts/fusion/vendor/urllib3/contrib/_appengine_environ.py b/openpype/hosts/fusion/vendor/urllib3/contrib/_appengine_environ.py
new file mode 100644
index 0000000000..8765b907d7
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/contrib/_appengine_environ.py
@@ -0,0 +1,36 @@
+"""
+This module provides means to detect the App Engine environment.
+"""
+
+import os
+
+
+def is_appengine():
+ return is_local_appengine() or is_prod_appengine()
+
+
+def is_appengine_sandbox():
+ """Reports if the app is running in the first generation sandbox.
+
+ The second generation runtimes are technically still in a sandbox, but it
+ is much less restrictive, so generally you shouldn't need to check for it.
+ See https://cloud.google.com/appengine/docs/standard/runtimes
+ """
+ return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27"
+
+
+def is_local_appengine():
+ return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
+ "SERVER_SOFTWARE", ""
+ ).startswith("Development/")
+
+
+def is_prod_appengine():
+ return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
+ "SERVER_SOFTWARE", ""
+ ).startswith("Google App Engine/")
+
+
+def is_prod_appengine_mvms():
+ """Deprecated."""
+ return False
diff --git a/openpype/hosts/fusion/vendor/urllib3/contrib/_securetransport/__init__.py b/openpype/hosts/fusion/vendor/urllib3/contrib/_securetransport/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/openpype/hosts/fusion/vendor/urllib3/contrib/_securetransport/bindings.py b/openpype/hosts/fusion/vendor/urllib3/contrib/_securetransport/bindings.py
new file mode 100644
index 0000000000..11524d400b
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/contrib/_securetransport/bindings.py
@@ -0,0 +1,519 @@
+"""
+This module uses ctypes to bind a whole bunch of functions and constants from
+SecureTransport. The goal here is to provide the low-level API to
+SecureTransport. These are essentially the C-level functions and constants, and
+they're pretty gross to work with.
+
+This code is a bastardised version of the code found in Will Bond's oscrypto
+library. An enormous debt is owed to him for blazing this trail for us. For
+that reason, this code should be considered to be covered both by urllib3's
+license and by oscrypto's:
+
+ Copyright (c) 2015-2016 Will Bond
+
+ Permission is hereby granted, free of charge, to any person obtaining a
+ copy of this software and associated documentation files (the "Software"),
+ to deal in the Software without restriction, including without limitation
+ the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ and/or sell copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS IN THE SOFTWARE.
+"""
+from __future__ import absolute_import
+
+import platform
+from ctypes import (
+ CDLL,
+ CFUNCTYPE,
+ POINTER,
+ c_bool,
+ c_byte,
+ c_char_p,
+ c_int32,
+ c_long,
+ c_size_t,
+ c_uint32,
+ c_ulong,
+ c_void_p,
+)
+from ctypes.util import find_library
+
+from urllib3.packages.six import raise_from
+
+if platform.system() != "Darwin":
+ raise ImportError("Only macOS is supported")
+
+version = platform.mac_ver()[0]
+version_info = tuple(map(int, version.split(".")))
+if version_info < (10, 8):
+ raise OSError(
+ "Only OS X 10.8 and newer are supported, not %s.%s"
+ % (version_info[0], version_info[1])
+ )
+
+
+def load_cdll(name, macos10_16_path):
+ """Loads a CDLL by name, falling back to known path on 10.16+"""
+ try:
+ # Big Sur is technically 11 but we use 10.16 due to the Big Sur
+ # beta being labeled as 10.16.
+ if version_info >= (10, 16):
+ path = macos10_16_path
+ else:
+ path = find_library(name)
+ if not path:
+ raise OSError # Caught and reraised as 'ImportError'
+ return CDLL(path, use_errno=True)
+ except OSError:
+ raise_from(ImportError("The library %s failed to load" % name), None)
+
+
+Security = load_cdll(
+ "Security", "/System/Library/Frameworks/Security.framework/Security"
+)
+CoreFoundation = load_cdll(
+ "CoreFoundation",
+ "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
+)
+
+
+Boolean = c_bool
+CFIndex = c_long
+CFStringEncoding = c_uint32
+CFData = c_void_p
+CFString = c_void_p
+CFArray = c_void_p
+CFMutableArray = c_void_p
+CFDictionary = c_void_p
+CFError = c_void_p
+CFType = c_void_p
+CFTypeID = c_ulong
+
+CFTypeRef = POINTER(CFType)
+CFAllocatorRef = c_void_p
+
+OSStatus = c_int32
+
+CFDataRef = POINTER(CFData)
+CFStringRef = POINTER(CFString)
+CFArrayRef = POINTER(CFArray)
+CFMutableArrayRef = POINTER(CFMutableArray)
+CFDictionaryRef = POINTER(CFDictionary)
+CFArrayCallBacks = c_void_p
+CFDictionaryKeyCallBacks = c_void_p
+CFDictionaryValueCallBacks = c_void_p
+
+SecCertificateRef = POINTER(c_void_p)
+SecExternalFormat = c_uint32
+SecExternalItemType = c_uint32
+SecIdentityRef = POINTER(c_void_p)
+SecItemImportExportFlags = c_uint32
+SecItemImportExportKeyParameters = c_void_p
+SecKeychainRef = POINTER(c_void_p)
+SSLProtocol = c_uint32
+SSLCipherSuite = c_uint32
+SSLContextRef = POINTER(c_void_p)
+SecTrustRef = POINTER(c_void_p)
+SSLConnectionRef = c_uint32
+SecTrustResultType = c_uint32
+SecTrustOptionFlags = c_uint32
+SSLProtocolSide = c_uint32
+SSLConnectionType = c_uint32
+SSLSessionOption = c_uint32
+
+
+try:
+ Security.SecItemImport.argtypes = [
+ CFDataRef,
+ CFStringRef,
+ POINTER(SecExternalFormat),
+ POINTER(SecExternalItemType),
+ SecItemImportExportFlags,
+ POINTER(SecItemImportExportKeyParameters),
+ SecKeychainRef,
+ POINTER(CFArrayRef),
+ ]
+ Security.SecItemImport.restype = OSStatus
+
+ Security.SecCertificateGetTypeID.argtypes = []
+ Security.SecCertificateGetTypeID.restype = CFTypeID
+
+ Security.SecIdentityGetTypeID.argtypes = []
+ Security.SecIdentityGetTypeID.restype = CFTypeID
+
+ Security.SecKeyGetTypeID.argtypes = []
+ Security.SecKeyGetTypeID.restype = CFTypeID
+
+ Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
+ Security.SecCertificateCreateWithData.restype = SecCertificateRef
+
+ Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
+ Security.SecCertificateCopyData.restype = CFDataRef
+
+ Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
+ Security.SecCopyErrorMessageString.restype = CFStringRef
+
+ Security.SecIdentityCreateWithCertificate.argtypes = [
+ CFTypeRef,
+ SecCertificateRef,
+ POINTER(SecIdentityRef),
+ ]
+ Security.SecIdentityCreateWithCertificate.restype = OSStatus
+
+ Security.SecKeychainCreate.argtypes = [
+ c_char_p,
+ c_uint32,
+ c_void_p,
+ Boolean,
+ c_void_p,
+ POINTER(SecKeychainRef),
+ ]
+ Security.SecKeychainCreate.restype = OSStatus
+
+ Security.SecKeychainDelete.argtypes = [SecKeychainRef]
+ Security.SecKeychainDelete.restype = OSStatus
+
+ Security.SecPKCS12Import.argtypes = [
+ CFDataRef,
+ CFDictionaryRef,
+ POINTER(CFArrayRef),
+ ]
+ Security.SecPKCS12Import.restype = OSStatus
+
+ SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
+ SSLWriteFunc = CFUNCTYPE(
+ OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
+ )
+
+ Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
+ Security.SSLSetIOFuncs.restype = OSStatus
+
+ Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
+ Security.SSLSetPeerID.restype = OSStatus
+
+ Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
+ Security.SSLSetCertificate.restype = OSStatus
+
+ Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
+ Security.SSLSetCertificateAuthorities.restype = OSStatus
+
+ Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
+ Security.SSLSetConnection.restype = OSStatus
+
+ Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
+ Security.SSLSetPeerDomainName.restype = OSStatus
+
+ Security.SSLHandshake.argtypes = [SSLContextRef]
+ Security.SSLHandshake.restype = OSStatus
+
+ Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
+ Security.SSLRead.restype = OSStatus
+
+ Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
+ Security.SSLWrite.restype = OSStatus
+
+ Security.SSLClose.argtypes = [SSLContextRef]
+ Security.SSLClose.restype = OSStatus
+
+ Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
+ Security.SSLGetNumberSupportedCiphers.restype = OSStatus
+
+ Security.SSLGetSupportedCiphers.argtypes = [
+ SSLContextRef,
+ POINTER(SSLCipherSuite),
+ POINTER(c_size_t),
+ ]
+ Security.SSLGetSupportedCiphers.restype = OSStatus
+
+ Security.SSLSetEnabledCiphers.argtypes = [
+ SSLContextRef,
+ POINTER(SSLCipherSuite),
+ c_size_t,
+ ]
+ Security.SSLSetEnabledCiphers.restype = OSStatus
+
+    Security.SSLGetNumberEnabledCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
+ Security.SSLGetNumberEnabledCiphers.restype = OSStatus
+
+ Security.SSLGetEnabledCiphers.argtypes = [
+ SSLContextRef,
+ POINTER(SSLCipherSuite),
+ POINTER(c_size_t),
+ ]
+ Security.SSLGetEnabledCiphers.restype = OSStatus
+
+ Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
+ Security.SSLGetNegotiatedCipher.restype = OSStatus
+
+ Security.SSLGetNegotiatedProtocolVersion.argtypes = [
+ SSLContextRef,
+ POINTER(SSLProtocol),
+ ]
+ Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
+
+ Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
+ Security.SSLCopyPeerTrust.restype = OSStatus
+
+ Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
+ Security.SecTrustSetAnchorCertificates.restype = OSStatus
+
+    Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean]
+ Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
+
+ Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
+ Security.SecTrustEvaluate.restype = OSStatus
+
+ Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
+ Security.SecTrustGetCertificateCount.restype = CFIndex
+
+ Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
+ Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
+
+ Security.SSLCreateContext.argtypes = [
+ CFAllocatorRef,
+ SSLProtocolSide,
+ SSLConnectionType,
+ ]
+ Security.SSLCreateContext.restype = SSLContextRef
+
+ Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
+ Security.SSLSetSessionOption.restype = OSStatus
+
+ Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
+ Security.SSLSetProtocolVersionMin.restype = OSStatus
+
+ Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
+ Security.SSLSetProtocolVersionMax.restype = OSStatus
+
+ try:
+ Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
+ Security.SSLSetALPNProtocols.restype = OSStatus
+ except AttributeError:
+ # Supported only in 10.12+
+ pass
+
+ Security.SSLReadFunc = SSLReadFunc
+ Security.SSLWriteFunc = SSLWriteFunc
+ Security.SSLContextRef = SSLContextRef
+ Security.SSLProtocol = SSLProtocol
+ Security.SSLCipherSuite = SSLCipherSuite
+ Security.SecIdentityRef = SecIdentityRef
+ Security.SecKeychainRef = SecKeychainRef
+ Security.SecTrustRef = SecTrustRef
+ Security.SecTrustResultType = SecTrustResultType
+ Security.SecExternalFormat = SecExternalFormat
+ Security.OSStatus = OSStatus
+
+ Security.kSecImportExportPassphrase = CFStringRef.in_dll(
+ Security, "kSecImportExportPassphrase"
+ )
+ Security.kSecImportItemIdentity = CFStringRef.in_dll(
+ Security, "kSecImportItemIdentity"
+ )
+
+ # CoreFoundation time!
+ CoreFoundation.CFRetain.argtypes = [CFTypeRef]
+ CoreFoundation.CFRetain.restype = CFTypeRef
+
+ CoreFoundation.CFRelease.argtypes = [CFTypeRef]
+ CoreFoundation.CFRelease.restype = None
+
+ CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
+ CoreFoundation.CFGetTypeID.restype = CFTypeID
+
+ CoreFoundation.CFStringCreateWithCString.argtypes = [
+ CFAllocatorRef,
+ c_char_p,
+ CFStringEncoding,
+ ]
+ CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
+
+ CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
+ CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
+
+ CoreFoundation.CFStringGetCString.argtypes = [
+ CFStringRef,
+ c_char_p,
+ CFIndex,
+ CFStringEncoding,
+ ]
+ CoreFoundation.CFStringGetCString.restype = c_bool
+
+ CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
+ CoreFoundation.CFDataCreate.restype = CFDataRef
+
+ CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
+ CoreFoundation.CFDataGetLength.restype = CFIndex
+
+ CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
+ CoreFoundation.CFDataGetBytePtr.restype = c_void_p
+
+ CoreFoundation.CFDictionaryCreate.argtypes = [
+ CFAllocatorRef,
+ POINTER(CFTypeRef),
+ POINTER(CFTypeRef),
+ CFIndex,
+ CFDictionaryKeyCallBacks,
+ CFDictionaryValueCallBacks,
+ ]
+ CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
+
+ CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef]
+ CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef
+
+ CoreFoundation.CFArrayCreate.argtypes = [
+ CFAllocatorRef,
+ POINTER(CFTypeRef),
+ CFIndex,
+ CFArrayCallBacks,
+ ]
+ CoreFoundation.CFArrayCreate.restype = CFArrayRef
+
+ CoreFoundation.CFArrayCreateMutable.argtypes = [
+ CFAllocatorRef,
+ CFIndex,
+ CFArrayCallBacks,
+ ]
+ CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
+
+ CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
+ CoreFoundation.CFArrayAppendValue.restype = None
+
+ CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
+ CoreFoundation.CFArrayGetCount.restype = CFIndex
+
+ CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
+ CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
+
+ CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
+ CoreFoundation, "kCFAllocatorDefault"
+ )
+ CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(
+ CoreFoundation, "kCFTypeArrayCallBacks"
+ )
+ CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
+ CoreFoundation, "kCFTypeDictionaryKeyCallBacks"
+ )
+ CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
+ CoreFoundation, "kCFTypeDictionaryValueCallBacks"
+ )
+
+ CoreFoundation.CFTypeRef = CFTypeRef
+ CoreFoundation.CFArrayRef = CFArrayRef
+ CoreFoundation.CFStringRef = CFStringRef
+ CoreFoundation.CFDictionaryRef = CFDictionaryRef
+
+except AttributeError:
+ raise ImportError("Error initializing ctypes")
+
+
+class CFConst(object):
+ """
+ A class object that acts as essentially a namespace for CoreFoundation
+ constants.
+ """
+
+ kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
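+
+# A usage sketch (not part of the upstream bindings): the constant above is
+# handed straight to the annotated CoreFoundation functions, e.g.:
+#
+#     CoreFoundation.CFStringCreateWithCString(
+#         CoreFoundation.kCFAllocatorDefault, b"hi", CFConst.kCFStringEncodingUTF8
+#     )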
+
+
+class SecurityConst(object):
+ """
+ A class object that acts as essentially a namespace for Security constants.
+ """
+
+ kSSLSessionOptionBreakOnServerAuth = 0
+
+ kSSLProtocol2 = 1
+ kSSLProtocol3 = 2
+ kTLSProtocol1 = 4
+ kTLSProtocol11 = 7
+ kTLSProtocol12 = 8
+    # SecureTransport does not support TLS 1.3 even though there's a constant for it
+ kTLSProtocol13 = 10
+ kTLSProtocolMaxSupported = 999
+
+ kSSLClientSide = 1
+ kSSLStreamType = 0
+
+ kSecFormatPEMSequence = 10
+
+ kSecTrustResultInvalid = 0
+ kSecTrustResultProceed = 1
+ # This gap is present on purpose: this was kSecTrustResultConfirm, which
+ # is deprecated.
+ kSecTrustResultDeny = 3
+ kSecTrustResultUnspecified = 4
+ kSecTrustResultRecoverableTrustFailure = 5
+ kSecTrustResultFatalTrustFailure = 6
+ kSecTrustResultOtherError = 7
+
+ errSSLProtocol = -9800
+ errSSLWouldBlock = -9803
+ errSSLClosedGraceful = -9805
+ errSSLClosedNoNotify = -9816
+ errSSLClosedAbort = -9806
+
+ errSSLXCertChainInvalid = -9807
+ errSSLCrypto = -9809
+ errSSLInternal = -9810
+ errSSLCertExpired = -9814
+ errSSLCertNotYetValid = -9815
+ errSSLUnknownRootCert = -9812
+ errSSLNoRootCert = -9813
+ errSSLHostNameMismatch = -9843
+ errSSLPeerHandshakeFail = -9824
+ errSSLPeerUserCancelled = -9839
+ errSSLWeakPeerEphemeralDHKey = -9850
+ errSSLServerAuthCompleted = -9841
+ errSSLRecordOverflow = -9847
+
+ errSecVerifyFailed = -67808
+ errSecNoTrustSettings = -25263
+ errSecItemNotFound = -25300
+ errSecInvalidTrustSettings = -25262
+
+ # Cipher suites. We only pick the ones our default cipher string allows.
+ # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values
+ TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C
+ TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030
+ TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B
+ TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F
+ TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9
+ TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8
+ TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F
+ TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E
+ TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024
+ TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028
+ TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A
+ TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014
+ TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B
+ TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039
+ TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023
+ TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027
+ TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009
+ TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013
+ TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067
+ TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033
+ TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D
+ TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C
+ TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D
+ TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C
+ TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035
+ TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
+ TLS_AES_128_GCM_SHA256 = 0x1301
+ TLS_AES_256_GCM_SHA384 = 0x1302
+ TLS_AES_128_CCM_8_SHA256 = 0x1305
+ TLS_AES_128_CCM_SHA256 = 0x1304
diff --git a/openpype/hosts/fusion/vendor/urllib3/contrib/_securetransport/low_level.py b/openpype/hosts/fusion/vendor/urllib3/contrib/_securetransport/low_level.py
new file mode 100644
index 0000000000..ed8120190c
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/contrib/_securetransport/low_level.py
@@ -0,0 +1,396 @@
+"""
+Low-level helpers for the SecureTransport bindings.
+
+These are Python functions that are not directly related to the high-level APIs
+but are necessary to get them to work. They include a whole bunch of low-level
+CoreFoundation messing about and memory management. The concerns in this module
+are almost entirely about trying to avoid memory leaks and providing
+appropriate and useful assistance to the higher-level code.
+"""
+import base64
+import ctypes
+import itertools
+import os
+import re
+import ssl
+import struct
+import tempfile
+
+from .bindings import CFConst, CoreFoundation, Security
+
+# This regular expression is used to grab PEM data out of a PEM bundle.
+_PEM_CERTS_RE = re.compile(
+ b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
+)
+
+
+def _cf_data_from_bytes(bytestring):
+ """
+ Given a bytestring, create a CFData object from it. This CFData object must
+ be CFReleased by the caller.
+ """
+ return CoreFoundation.CFDataCreate(
+ CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring)
+ )
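+
+# A usage sketch: CFDataCreate follows CoreFoundation's "create rule", so the
+# caller owns (and must release) the returned object, e.g.:
+#
+#     data = _cf_data_from_bytes(b"...")
+#     try:
+#         pass  # hand `data` to a Security.framework call
+#     finally:
+#         CoreFoundation.CFRelease(data)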
+
+
+def _cf_dictionary_from_tuples(tuples):
+ """
+ Given a list of Python tuples, create an associated CFDictionary.
+ """
+ dictionary_size = len(tuples)
+
+ # We need to get the dictionary keys and values out in the same order.
+ keys = (t[0] for t in tuples)
+ values = (t[1] for t in tuples)
+ cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys)
+ cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values)
+
+ return CoreFoundation.CFDictionaryCreate(
+ CoreFoundation.kCFAllocatorDefault,
+ cf_keys,
+ cf_values,
+ dictionary_size,
+ CoreFoundation.kCFTypeDictionaryKeyCallBacks,
+ CoreFoundation.kCFTypeDictionaryValueCallBacks,
+ )
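+
+# A usage sketch: the tuples must already hold CF objects, since the
+# dictionary is created with the standard CFType callbacks. For example,
+# building options for a passphrase-protected import:
+#
+#     passphrase = _cfstr(b"secret")  # hypothetical passphrase
+#     options = _cf_dictionary_from_tuples(
+#         [(Security.kSecImportExportPassphrase, passphrase)]
+#     )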
+
+
+def _cfstr(py_bstr):
+ """
+    Given Python binary data, create a CFString.
+ The string must be CFReleased by the caller.
+ """
+ c_str = ctypes.c_char_p(py_bstr)
+ cf_str = CoreFoundation.CFStringCreateWithCString(
+ CoreFoundation.kCFAllocatorDefault,
+ c_str,
+ CFConst.kCFStringEncodingUTF8,
+ )
+ return cf_str
+
+
+def _create_cfstring_array(lst):
+ """
+ Given a list of Python binary data, create an associated CFMutableArray.
+ The array must be CFReleased by the caller.
+
+ Raises an ssl.SSLError on failure.
+ """
+ cf_arr = None
+ try:
+ cf_arr = CoreFoundation.CFArrayCreateMutable(
+ CoreFoundation.kCFAllocatorDefault,
+ 0,
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
+ )
+ if not cf_arr:
+ raise MemoryError("Unable to allocate memory!")
+ for item in lst:
+ cf_str = _cfstr(item)
+ if not cf_str:
+ raise MemoryError("Unable to allocate memory!")
+ try:
+ CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
+ finally:
+ CoreFoundation.CFRelease(cf_str)
+ except BaseException as e:
+ if cf_arr:
+ CoreFoundation.CFRelease(cf_arr)
+ raise ssl.SSLError("Unable to allocate array: %s" % (e,))
+ return cf_arr
+
+
+def _cf_string_to_unicode(value):
+ """
+ Creates a Unicode string from a CFString object. Used entirely for error
+ reporting.
+
+ Yes, it annoys me quite a lot that this function is this complex.
+ """
+ value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
+
+ string = CoreFoundation.CFStringGetCStringPtr(
+ value_as_void_p, CFConst.kCFStringEncodingUTF8
+ )
+ if string is None:
+ buffer = ctypes.create_string_buffer(1024)
+ result = CoreFoundation.CFStringGetCString(
+ value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8
+ )
+ if not result:
+ raise OSError("Error copying C string from CFStringRef")
+ string = buffer.value
+ if string is not None:
+ string = string.decode("utf-8")
+ return string
+
+
+def _assert_no_error(error, exception_class=None):
+ """
+ Checks the return code and throws an exception if there is an error to
+    report.
+ """
+ if error == 0:
+ return
+
+ cf_error_string = Security.SecCopyErrorMessageString(error, None)
+ output = _cf_string_to_unicode(cf_error_string)
+ CoreFoundation.CFRelease(cf_error_string)
+
+ if output is None or output == u"":
+ output = u"OSStatus %s" % error
+
+ if exception_class is None:
+ exception_class = ssl.SSLError
+
+ raise exception_class(output)
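+
+# A usage sketch: wrap any OSStatus-returning Security call, e.g.:
+#
+#     status = Security.SecKeychainDelete(keychain)
+#     _assert_no_error(status)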
+
+
+def _cert_array_from_pem(pem_bundle):
+ """
+ Given a bundle of certs in PEM format, turns them into a CFArray of certs
+ that can be used to validate a cert chain.
+ """
+ # Normalize the PEM bundle's line endings.
+ pem_bundle = pem_bundle.replace(b"\r\n", b"\n")
+
+ der_certs = [
+ base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle)
+ ]
+ if not der_certs:
+ raise ssl.SSLError("No root certificates specified")
+
+ cert_array = CoreFoundation.CFArrayCreateMutable(
+ CoreFoundation.kCFAllocatorDefault,
+ 0,
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
+ )
+ if not cert_array:
+ raise ssl.SSLError("Unable to allocate memory!")
+
+ try:
+ for der_bytes in der_certs:
+ certdata = _cf_data_from_bytes(der_bytes)
+ if not certdata:
+ raise ssl.SSLError("Unable to allocate memory!")
+ cert = Security.SecCertificateCreateWithData(
+ CoreFoundation.kCFAllocatorDefault, certdata
+ )
+ CoreFoundation.CFRelease(certdata)
+ if not cert:
+ raise ssl.SSLError("Unable to build cert object!")
+
+ CoreFoundation.CFArrayAppendValue(cert_array, cert)
+ CoreFoundation.CFRelease(cert)
+ except Exception:
+ # We need to free the array before the exception bubbles further.
+ # We only want to do that if an error occurs: otherwise, the caller
+ # should free.
+        CoreFoundation.CFRelease(cert_array)
+        raise
+
+ return cert_array
+
+
+def _is_cert(item):
+ """
+ Returns True if a given CFTypeRef is a certificate.
+ """
+ expected = Security.SecCertificateGetTypeID()
+ return CoreFoundation.CFGetTypeID(item) == expected
+
+
+def _is_identity(item):
+ """
+ Returns True if a given CFTypeRef is an identity.
+ """
+ expected = Security.SecIdentityGetTypeID()
+ return CoreFoundation.CFGetTypeID(item) == expected
+
+
+def _temporary_keychain():
+ """
+ This function creates a temporary Mac keychain that we can use to work with
+ credentials. This keychain uses a one-time password and a temporary file to
+ store the data. We expect to have one keychain per socket. The returned
+ SecKeychainRef must be freed by the caller, including calling
+ SecKeychainDelete.
+
+ Returns a tuple of the SecKeychainRef and the path to the temporary
+ directory that contains it.
+ """
+ # Unfortunately, SecKeychainCreate requires a path to a keychain. This
+ # means we cannot use mkstemp to use a generic temporary file. Instead,
+ # we're going to create a temporary directory and a filename to use there.
+    # This filename will be 8 random bytes expanded into base16 (hex). We
+    # also need some random bytes to password-protect the keychain we're
+    # creating, so we ask for 40 random bytes.
+ random_bytes = os.urandom(40)
+ filename = base64.b16encode(random_bytes[:8]).decode("utf-8")
+ password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8
+ tempdirectory = tempfile.mkdtemp()
+
+ keychain_path = os.path.join(tempdirectory, filename).encode("utf-8")
+
+ # We now want to create the keychain itself.
+ keychain = Security.SecKeychainRef()
+ status = Security.SecKeychainCreate(
+ keychain_path, len(password), password, False, None, ctypes.byref(keychain)
+ )
+ _assert_no_error(status)
+
+ # Having created the keychain, we want to pass it off to the caller.
+ return keychain, tempdirectory
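+
+# A lifecycle sketch: the caller cleans up both the keychain and its
+# directory, roughly the way the securetransport module does on close:
+#
+#     keychain, tempdir = _temporary_keychain()
+#     try:
+#         pass  # SecItemImport into the keychain, handshake, etc.
+#     finally:
+#         Security.SecKeychainDelete(keychain)
+#         CoreFoundation.CFRelease(keychain)
+#         shutil.rmtree(tempdir)  # note: shutil is not imported here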
+
+
+def _load_items_from_file(keychain, path):
+ """
+ Given a single file, loads all the trust objects from it into arrays and
+ the keychain.
+ Returns a tuple of lists: the first list is a list of identities, the
+ second a list of certs.
+ """
+ certificates = []
+ identities = []
+ result_array = None
+
+ with open(path, "rb") as f:
+ raw_filedata = f.read()
+
+ try:
+ filedata = CoreFoundation.CFDataCreate(
+ CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata)
+ )
+ result_array = CoreFoundation.CFArrayRef()
+ result = Security.SecItemImport(
+ filedata, # cert data
+ None, # Filename, leaving it out for now
+ None, # What the type of the file is, we don't care
+ None, # what's in the file, we don't care
+ 0, # import flags
+ None, # key params, can include passphrase in the future
+ keychain, # The keychain to insert into
+ ctypes.byref(result_array), # Results
+ )
+ _assert_no_error(result)
+
+ # A CFArray is not very useful to us as an intermediary
+ # representation, so we are going to extract the objects we want
+ # and then free the array. We don't need to keep hold of keys: the
+ # keychain already has them!
+ result_count = CoreFoundation.CFArrayGetCount(result_array)
+ for index in range(result_count):
+ item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index)
+ item = ctypes.cast(item, CoreFoundation.CFTypeRef)
+
+ if _is_cert(item):
+ CoreFoundation.CFRetain(item)
+ certificates.append(item)
+ elif _is_identity(item):
+ CoreFoundation.CFRetain(item)
+ identities.append(item)
+ finally:
+ if result_array:
+ CoreFoundation.CFRelease(result_array)
+
+ CoreFoundation.CFRelease(filedata)
+
+ return (identities, certificates)
+
+
+def _load_client_cert_chain(keychain, *paths):
+ """
+ Load certificates and maybe keys from a number of files. Has the end goal
+ of returning a CFArray containing one SecIdentityRef, and then zero or more
+ SecCertificateRef objects, suitable for use as a client certificate trust
+ chain.
+ """
+ # Ok, the strategy.
+ #
+ # This relies on knowing that macOS will not give you a SecIdentityRef
+ # unless you have imported a key into a keychain. This is a somewhat
+ # artificial limitation of macOS (for example, it doesn't necessarily
+ # affect iOS), but there is nothing inside Security.framework that lets you
+ # get a SecIdentityRef without having a key in a keychain.
+ #
+ # So the policy here is we take all the files and iterate them in order.
+ # Each one will use SecItemImport to have one or more objects loaded from
+ # it. We will also point at a keychain that macOS can use to work with the
+ # private key.
+ #
+ # Once we have all the objects, we'll check what we actually have. If we
+ # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise,
+ # we'll take the first certificate (which we assume to be our leaf) and
+ # ask the keychain to give us a SecIdentityRef with that cert's associated
+ # key.
+ #
+ # We'll then return a CFArray containing the trust chain: one
+ # SecIdentityRef and then zero-or-more SecCertificateRef objects. The
+ # responsibility for freeing this CFArray will be with the caller. This
+ # CFArray must remain alive for the entire connection, so in practice it
+ # will be stored with a single SSLSocket, along with the reference to the
+ # keychain.
+ certificates = []
+ identities = []
+
+ # Filter out bad paths.
+ paths = (path for path in paths if path)
+
+ try:
+ for file_path in paths:
+ new_identities, new_certs = _load_items_from_file(keychain, file_path)
+ identities.extend(new_identities)
+ certificates.extend(new_certs)
+
+ # Ok, we have everything. The question is: do we have an identity? If
+ # not, we want to grab one from the first cert we have.
+ if not identities:
+ new_identity = Security.SecIdentityRef()
+ status = Security.SecIdentityCreateWithCertificate(
+ keychain, certificates[0], ctypes.byref(new_identity)
+ )
+ _assert_no_error(status)
+ identities.append(new_identity)
+
+ # We now want to release the original certificate, as we no longer
+ # need it.
+ CoreFoundation.CFRelease(certificates.pop(0))
+
+ # We now need to build a new CFArray that holds the trust chain.
+ trust_chain = CoreFoundation.CFArrayCreateMutable(
+ CoreFoundation.kCFAllocatorDefault,
+ 0,
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
+ )
+ for item in itertools.chain(identities, certificates):
+ # ArrayAppendValue does a CFRetain on the item. That's fine,
+ # because the finally block will release our other refs to them.
+ CoreFoundation.CFArrayAppendValue(trust_chain, item)
+
+ return trust_chain
+ finally:
+ for obj in itertools.chain(identities, certificates):
+ CoreFoundation.CFRelease(obj)
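+
+# A usage sketch combining the helpers above ("client.pem" is a hypothetical
+# path):
+#
+#     keychain, tempdir = _temporary_keychain()
+#     trust_chain = _load_client_cert_chain(keychain, "client.pem")
+#     ...  # pass trust_chain to Security.SSLSetCertificate
+#     CoreFoundation.CFRelease(trust_chain)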
+
+
+TLS_PROTOCOL_VERSIONS = {
+ "SSLv2": (0, 2),
+ "SSLv3": (3, 0),
+ "TLSv1": (3, 1),
+ "TLSv1.1": (3, 2),
+ "TLSv1.2": (3, 3),
+}
+
+
+def _build_tls_unknown_ca_alert(version):
+ """
+ Builds a TLS alert record for an unknown CA.
+ """
+ ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
+ severity_fatal = 0x02
+ description_unknown_ca = 0x30
+ msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
+ msg_len = len(msg)
+ record_type_alert = 0x15
+ record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
+ return record
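+
+# A worked example: for "TLSv1.2" (major 3, minor 3) this yields the seven
+# bytes b"\x15\x03\x03\x00\x02\x02\x30" -- record type alert (0x15),
+# version 3.3, a two-byte payload: severity fatal (0x02), unknown_ca (0x30).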
diff --git a/openpype/hosts/fusion/vendor/urllib3/contrib/appengine.py b/openpype/hosts/fusion/vendor/urllib3/contrib/appengine.py
new file mode 100644
index 0000000000..f91bdd6e77
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/contrib/appengine.py
@@ -0,0 +1,314 @@
+"""
+This module provides a pool manager that uses Google App Engine's
+`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
+
+Example usage::
+
+ from urllib3 import PoolManager
+ from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
+
+ if is_appengine_sandbox():
+ # AppEngineManager uses AppEngine's URLFetch API behind the scenes
+ http = AppEngineManager()
+ else:
+ # PoolManager uses a socket-level API behind the scenes
+ http = PoolManager()
+
+ r = http.request('GET', 'https://google.com/')
+
+There are `limitations
+<https://cloud.google.com/appengine/docs/python/urlfetch/#Quotas_and_limits>`_
+to the URLFetch service and it may not be the best choice for your
+application. There are three options for using urllib3 on Google App Engine:
+
+1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
+ cost-effective in many circumstances as long as your usage is within the
+ limitations.
+2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
+ Sockets also have `limitations and restrictions
+   <https://cloud.google.com/appengine/docs/python/sockets/#limitations-and-restrictions>`_
+   and have a lower free quota than URLFetch.
+ To use sockets, be sure to specify the following in your ``app.yaml``::
+
+ env_variables:
+ GAE_USE_SOCKETS_HTTPLIB : 'true'
+
+3. If you are using `App Engine Flexible
+   <https://cloud.google.com/appengine/docs/flexible/>`_, you can use the
+   standard :class:`PoolManager` without any configuration or special
+   environment variables.
+"""
+
+from __future__ import absolute_import
+
+import io
+import logging
+import warnings
+
+from ..exceptions import (
+ HTTPError,
+ HTTPWarning,
+ MaxRetryError,
+ ProtocolError,
+ SSLError,
+ TimeoutError,
+)
+from ..packages.six.moves.urllib.parse import urljoin
+from ..request import RequestMethods
+from ..response import HTTPResponse
+from ..util.retry import Retry
+from ..util.timeout import Timeout
+from . import _appengine_environ
+
+try:
+ from google.appengine.api import urlfetch
+except ImportError:
+ urlfetch = None
+
+
+log = logging.getLogger(__name__)
+
+
+class AppEnginePlatformWarning(HTTPWarning):
+ pass
+
+
+class AppEnginePlatformError(HTTPError):
+ pass
+
+
+class AppEngineManager(RequestMethods):
+ """
+ Connection manager for Google App Engine sandbox applications.
+
+ This manager uses the URLFetch service directly instead of using the
+ emulated httplib, and is subject to URLFetch limitations as described in
+ the App Engine documentation `here
+ `_.
+
+ Notably it will raise an :class:`AppEnginePlatformError` if:
+ * URLFetch is not available.
+    * You attempt to use this on App Engine Flexible, where full socket
+      support is available.
+    * A request is larger than 10 megabytes.
+    * A response is larger than 32 megabytes.
+    * You use an unsupported request method such as OPTIONS.
+
+ Beyond those cases, it will raise normal urllib3 errors.
+ """
+
+ def __init__(
+ self,
+ headers=None,
+ retries=None,
+ validate_certificate=True,
+ urlfetch_retries=True,
+ ):
+ if not urlfetch:
+ raise AppEnginePlatformError(
+ "URLFetch is not available in this environment."
+ )
+
+ warnings.warn(
+ "urllib3 is using URLFetch on Google App Engine sandbox instead "
+ "of sockets. To use sockets directly instead of URLFetch see "
+ "https://urllib3.readthedocs.io/en/1.26.x/reference/urllib3.contrib.html.",
+ AppEnginePlatformWarning,
+ )
+
+ RequestMethods.__init__(self, headers)
+ self.validate_certificate = validate_certificate
+ self.urlfetch_retries = urlfetch_retries
+
+ self.retries = retries or Retry.DEFAULT
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def urlopen(
+ self,
+ method,
+ url,
+ body=None,
+ headers=None,
+ retries=None,
+ redirect=True,
+ timeout=Timeout.DEFAULT_TIMEOUT,
+ **response_kw
+ ):
+
+ retries = self._get_retries(retries, redirect)
+
+ try:
+ follow_redirects = redirect and retries.redirect != 0 and retries.total
+ response = urlfetch.fetch(
+ url,
+ payload=body,
+ method=method,
+ headers=headers or {},
+ allow_truncated=False,
+ follow_redirects=self.urlfetch_retries and follow_redirects,
+ deadline=self._get_absolute_timeout(timeout),
+ validate_certificate=self.validate_certificate,
+ )
+ except urlfetch.DeadlineExceededError as e:
+ raise TimeoutError(self, e)
+
+ except urlfetch.InvalidURLError as e:
+ if "too large" in str(e):
+ raise AppEnginePlatformError(
+ "URLFetch request too large, URLFetch only "
+ "supports requests up to 10mb in size.",
+ e,
+ )
+ raise ProtocolError(e)
+
+ except urlfetch.DownloadError as e:
+ if "Too many redirects" in str(e):
+ raise MaxRetryError(self, url, reason=e)
+ raise ProtocolError(e)
+
+ except urlfetch.ResponseTooLargeError as e:
+ raise AppEnginePlatformError(
+ "URLFetch response too large, URLFetch only supports"
+ "responses up to 32mb in size.",
+ e,
+ )
+
+ except urlfetch.SSLCertificateError as e:
+ raise SSLError(e)
+
+ except urlfetch.InvalidMethodError as e:
+ raise AppEnginePlatformError(
+ "URLFetch does not support method: %s" % method, e
+ )
+
+ http_response = self._urlfetch_response_to_http_response(
+ response, retries=retries, **response_kw
+ )
+
+ # Handle redirect?
+ redirect_location = redirect and http_response.get_redirect_location()
+ if redirect_location:
+ # Check for redirect response
+ if self.urlfetch_retries and retries.raise_on_redirect:
+ raise MaxRetryError(self, url, "too many redirects")
+ else:
+ if http_response.status == 303:
+ method = "GET"
+
+ try:
+ retries = retries.increment(
+ method, url, response=http_response, _pool=self
+ )
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ raise MaxRetryError(self, url, "too many redirects")
+ return http_response
+
+ retries.sleep_for_retry(http_response)
+ log.debug("Redirecting %s -> %s", url, redirect_location)
+ redirect_url = urljoin(url, redirect_location)
+ return self.urlopen(
+ method,
+ redirect_url,
+ body,
+ headers,
+ retries=retries,
+ redirect=redirect,
+ timeout=timeout,
+ **response_kw
+ )
+
+ # Check if we should retry the HTTP response.
+ has_retry_after = bool(http_response.getheader("Retry-After"))
+ if retries.is_retry(method, http_response.status, has_retry_after):
+ retries = retries.increment(method, url, response=http_response, _pool=self)
+ log.debug("Retry: %s", url)
+ retries.sleep(http_response)
+ return self.urlopen(
+ method,
+ url,
+ body=body,
+ headers=headers,
+ retries=retries,
+ redirect=redirect,
+ timeout=timeout,
+ **response_kw
+ )
+
+ return http_response
+
+ def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
+
+ if is_prod_appengine():
+ # Production GAE handles deflate encoding automatically, but does
+ # not remove the encoding header.
+ content_encoding = urlfetch_resp.headers.get("content-encoding")
+
+ if content_encoding == "deflate":
+ del urlfetch_resp.headers["content-encoding"]
+
+ transfer_encoding = urlfetch_resp.headers.get("transfer-encoding")
+ # We have a full response's content,
+ # so let's make sure we don't report ourselves as chunked data.
+ if transfer_encoding == "chunked":
+ encodings = transfer_encoding.split(",")
+ encodings.remove("chunked")
+ urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings)
+
+ original_response = HTTPResponse(
+ # In order for decoding to work, we must present the content as
+ # a file-like object.
+ body=io.BytesIO(urlfetch_resp.content),
+ msg=urlfetch_resp.header_msg,
+ headers=urlfetch_resp.headers,
+ status=urlfetch_resp.status_code,
+ **response_kw
+ )
+
+ return HTTPResponse(
+ body=io.BytesIO(urlfetch_resp.content),
+ headers=urlfetch_resp.headers,
+ status=urlfetch_resp.status_code,
+ original_response=original_response,
+ **response_kw
+ )
+
+ def _get_absolute_timeout(self, timeout):
+ if timeout is Timeout.DEFAULT_TIMEOUT:
+ return None # Defer to URLFetch's default.
+ if isinstance(timeout, Timeout):
+ if timeout._read is not None or timeout._connect is not None:
+ warnings.warn(
+ "URLFetch does not support granular timeout settings, "
+ "reverting to total or default URLFetch timeout.",
+ AppEnginePlatformWarning,
+ )
+ return timeout.total
+ return timeout
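+
+    # For example (a sketch): Timeout(connect=5, read=10) has no `total`, so
+    # the method above warns and returns None, letting URLFetch fall back to
+    # its own default deadline.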
+
+ def _get_retries(self, retries, redirect):
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
+
+ if retries.connect or retries.read or retries.redirect:
+ warnings.warn(
+ "URLFetch only supports total retries and does not "
+ "recognize connect, read, or redirect retry parameters.",
+ AppEnginePlatformWarning,
+ )
+
+ return retries
+
+
+# Alias methods from _appengine_environ to maintain public API interface.
+
+is_appengine = _appengine_environ.is_appengine
+is_appengine_sandbox = _appengine_environ.is_appengine_sandbox
+is_local_appengine = _appengine_environ.is_local_appengine
+is_prod_appengine = _appengine_environ.is_prod_appengine
+is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms
diff --git a/openpype/hosts/fusion/vendor/urllib3/contrib/ntlmpool.py b/openpype/hosts/fusion/vendor/urllib3/contrib/ntlmpool.py
new file mode 100644
index 0000000000..41a8fd174c
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/contrib/ntlmpool.py
@@ -0,0 +1,130 @@
+"""
+NTLM authenticating pool, contributed by erikcederstran
+
+Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
+"""
+from __future__ import absolute_import
+
+import warnings
+from logging import getLogger
+
+from ntlm import ntlm
+
+from .. import HTTPSConnectionPool
+from ..packages.six.moves.http_client import HTTPSConnection
+
+warnings.warn(
+ "The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed "
+ "in urllib3 v2.0 release, urllib3 is not able to support it properly due "
+ "to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. "
+ "If you are a user of this module please comment in the mentioned issue.",
+ DeprecationWarning,
+)
+
+log = getLogger(__name__)
+
+
+class NTLMConnectionPool(HTTPSConnectionPool):
+ """
+ Implements an NTLM authentication version of an urllib3 connection pool
+ """
+
+ scheme = "https"
+
+ def __init__(self, user, pw, authurl, *args, **kwargs):
+ """
+ authurl is a random URL on the server that is protected by NTLM.
+ user is the Windows user, probably in the DOMAIN\\username format.
+ pw is the password for the user.
+ """
+ super(NTLMConnectionPool, self).__init__(*args, **kwargs)
+ self.authurl = authurl
+ self.rawuser = user
+ user_parts = user.split("\\", 1)
+ self.domain = user_parts[0].upper()
+ self.user = user_parts[1]
+ self.pw = pw
+
+ def _new_conn(self):
+ # Performs the NTLM handshake that secures the connection. The socket
+ # must be kept open while requests are performed.
+ self.num_connections += 1
+ log.debug(
+ "Starting NTLM HTTPS connection no. %d: https://%s%s",
+ self.num_connections,
+ self.host,
+ self.authurl,
+ )
+
+ headers = {"Connection": "Keep-Alive"}
+ req_header = "Authorization"
+ resp_header = "www-authenticate"
+
+ conn = HTTPSConnection(host=self.host, port=self.port)
+
+ # Send negotiation message
+ headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE(
+ self.rawuser
+ )
+ log.debug("Request headers: %s", headers)
+ conn.request("GET", self.authurl, None, headers)
+ res = conn.getresponse()
+ reshdr = dict(res.getheaders())
+ log.debug("Response status: %s %s", res.status, res.reason)
+ log.debug("Response headers: %s", reshdr)
+ log.debug("Response data: %s [...]", res.read(100))
+
+ # Remove the reference to the socket, so that it can not be closed by
+ # the response object (we want to keep the socket open)
+ res.fp = None
+
+ # Server should respond with a challenge message
+ auth_header_values = reshdr[resp_header].split(", ")
+ auth_header_value = None
+ for s in auth_header_values:
+ if s[:5] == "NTLM ":
+ auth_header_value = s[5:]
+ if auth_header_value is None:
+ raise Exception(
+ "Unexpected %s response header: %s" % (resp_header, reshdr[resp_header])
+ )
+
+ # Send authentication message
+ ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE(
+ auth_header_value
+ )
+ auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
+ ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags
+ )
+ headers[req_header] = "NTLM %s" % auth_msg
+ log.debug("Request headers: %s", headers)
+ conn.request("GET", self.authurl, None, headers)
+ res = conn.getresponse()
+ log.debug("Response status: %s %s", res.status, res.reason)
+ log.debug("Response headers: %s", dict(res.getheaders()))
+ log.debug("Response data: %s [...]", res.read()[:100])
+ if res.status != 200:
+ if res.status == 401:
+ raise Exception("Server rejected request: wrong username or password")
+ raise Exception("Wrong server response: %s %s" % (res.status, res.reason))
+
+ res.fp = None
+ log.debug("Connection established")
+ return conn
+
+ def urlopen(
+ self,
+ method,
+ url,
+ body=None,
+ headers=None,
+ retries=3,
+ redirect=True,
+ assert_same_host=True,
+ ):
+ if headers is None:
+ headers = {}
+ headers["Connection"] = "Keep-Alive"
+ return super(NTLMConnectionPool, self).urlopen(
+ method, url, body, headers, retries, redirect, assert_same_host
+ )
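+
+
+# A usage sketch (hypothetical host and credentials):
+#
+#     pool = NTLMConnectionPool(
+#         "EXAMPLE\\alice", "secret", authurl="/", host="intranet.example.com"
+#     )
+#     response = pool.urlopen("GET", "/")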
diff --git a/openpype/hosts/fusion/vendor/urllib3/contrib/pyopenssl.py b/openpype/hosts/fusion/vendor/urllib3/contrib/pyopenssl.py
new file mode 100644
index 0000000000..def83afdb2
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/contrib/pyopenssl.py
@@ -0,0 +1,511 @@
+"""
+TLS with SNI_-support for Python 2. Follow these instructions if you would
+like to verify TLS certificates in Python 2. Note, the default libraries do
+*not* do certificate checking; you need to do additional work to validate
+certificates yourself.
+
+This needs the following packages installed:
+
+* `pyOpenSSL`_ (tested with 16.0.0)
+* `cryptography`_ (minimum 1.3.4, from pyopenssl)
+* `idna`_ (minimum 2.0, from cryptography)
+
+However, pyopenssl depends on cryptography, which depends on idna, so while we
+use all three directly here we end up having relatively few packages required.
+
+You can install them with the following command:
+
+.. code-block:: bash
+
+ $ python -m pip install pyopenssl cryptography idna
+
+To activate certificate checking, call
+:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
+before you begin making HTTP requests. This can be done in a ``sitecustomize``
+module, or at any other time before your application begins using ``urllib3``,
+like this:
+
+.. code-block:: python
+
+ try:
+ import urllib3.contrib.pyopenssl
+ urllib3.contrib.pyopenssl.inject_into_urllib3()
+ except ImportError:
+ pass
+
+Now you can use :mod:`urllib3` as you normally would, and it will support SNI
+when the required modules are installed.
+
+Activating this module also has the positive side effect of disabling SSL/TLS
+compression in Python 2 (see `CRIME attack`_).
+
+.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
+.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
+.. _pyopenssl: https://www.pyopenssl.org
+.. _cryptography: https://cryptography.io
+.. _idna: https://github.com/kjd/idna
+"""
+from __future__ import absolute_import
+
+import OpenSSL.SSL
+from cryptography import x509
+from cryptography.hazmat.backends.openssl import backend as openssl_backend
+from cryptography.hazmat.backends.openssl.x509 import _Certificate
+
+try:
+ from cryptography.x509 import UnsupportedExtension
+except ImportError:
+ # UnsupportedExtension is gone in cryptography >= 2.1.0
+ class UnsupportedExtension(Exception):
+ pass
+
+
+from io import BytesIO
+from socket import error as SocketError
+from socket import timeout
+
+try: # Platform-specific: Python 2
+ from socket import _fileobject
+except ImportError: # Platform-specific: Python 3
+ _fileobject = None
+ from ..packages.backports.makefile import backport_makefile
+
+import logging
+import ssl
+import sys
+
+from .. import util
+from ..packages import six
+from ..util.ssl_ import PROTOCOL_TLS_CLIENT
+
+__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
+
+# SNI always works.
+HAS_SNI = True
+
+# Map from urllib3 to PyOpenSSL compatible parameter-values.
+_openssl_versions = {
+ util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
+ PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD,
+ ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
+}
+
+if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"):
+ _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD
+
+if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
+ _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
+
+if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"):
+ _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
+
+
+_stdlib_to_openssl_verify = {
+ ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
+ ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
+ ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
+ + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
+}
+_openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items())
+
+# OpenSSL will only write 16K at a time
+SSL_WRITE_BLOCKSIZE = 16384
+
+orig_util_HAS_SNI = util.HAS_SNI
+orig_util_SSLContext = util.ssl_.SSLContext
+
+
+log = logging.getLogger(__name__)
+
+
+def inject_into_urllib3():
+ "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support."
+
+ _validate_dependencies_met()
+
+ util.SSLContext = PyOpenSSLContext
+ util.ssl_.SSLContext = PyOpenSSLContext
+ util.HAS_SNI = HAS_SNI
+ util.ssl_.HAS_SNI = HAS_SNI
+ util.IS_PYOPENSSL = True
+ util.ssl_.IS_PYOPENSSL = True
+
+
+def extract_from_urllib3():
+ "Undo monkey-patching by :func:`inject_into_urllib3`."
+
+ util.SSLContext = orig_util_SSLContext
+ util.ssl_.SSLContext = orig_util_SSLContext
+ util.HAS_SNI = orig_util_HAS_SNI
+ util.ssl_.HAS_SNI = orig_util_HAS_SNI
+ util.IS_PYOPENSSL = False
+ util.ssl_.IS_PYOPENSSL = False
+
+
+def _validate_dependencies_met():
+ """
+ Verifies that PyOpenSSL's package-level dependencies have been met.
+ Throws `ImportError` if they are not met.
+ """
+ # Method added in `cryptography==1.1`; not available in older versions
+ from cryptography.x509.extensions import Extensions
+
+ if getattr(Extensions, "get_extension_for_class", None) is None:
+ raise ImportError(
+ "'cryptography' module missing required functionality. "
+ "Try upgrading to v1.3.4 or newer."
+ )
+
+ # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509
+ # attribute is only present on those versions.
+ from OpenSSL.crypto import X509
+
+ x509 = X509()
+ if getattr(x509, "_x509", None) is None:
+ raise ImportError(
+ "'pyOpenSSL' module missing required functionality. "
+ "Try upgrading to v0.14 or newer."
+ )
+
+
+def _dnsname_to_stdlib(name):
+ """
+ Converts a dNSName SubjectAlternativeName field to the form used by the
+ standard library on the given Python version.
+
+ Cryptography produces a dNSName as a unicode string that was idna-decoded
+ from ASCII bytes. We need to idna-encode that string to get it back, and
+ then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
+ uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
+
+ If the name cannot be idna-encoded then we return None signalling that
+ the name given should be skipped.
+ """
+
+ def idna_encode(name):
+ """
+ Borrowed wholesale from the Python Cryptography Project. It turns out
+ that we can't just safely call `idna.encode`: it can explode for
+ wildcard names. This avoids that problem.
+ """
+ import idna
+
+ try:
+ for prefix in [u"*.", u"."]:
+ if name.startswith(prefix):
+ name = name[len(prefix) :]
+ return prefix.encode("ascii") + idna.encode(name)
+ return idna.encode(name)
+ except idna.core.IDNAError:
+ return None
+
+ # Don't send IPv6 addresses through the IDNA encoder.
+ if ":" in name:
+ return name
+
+ name = idna_encode(name)
+ if name is None:
+ return None
+ elif sys.version_info >= (3, 0):
+ name = name.decode("utf-8")
+ return name
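+
+# For example (a sketch): u"*.example.com" survives the round trip (the "*."
+# prefix is split off before idna-encoding), while a name that cannot be
+# idna-encoded comes back as None and is skipped by get_subj_alt_name().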
+
+
+def get_subj_alt_name(peer_cert):
+ """
+    Given a PyOpenSSL certificate, provides all the subject alternative names.
+ """
+ # Pass the cert to cryptography, which has much better APIs for this.
+ if hasattr(peer_cert, "to_cryptography"):
+ cert = peer_cert.to_cryptography()
+ else:
+ # This is technically using private APIs, but should work across all
+ # relevant versions before PyOpenSSL got a proper API for this.
+ cert = _Certificate(openssl_backend, peer_cert._x509)
+
+ # We want to find the SAN extension. Ask Cryptography to locate it (it's
+ # faster than looping in Python)
+ try:
+ ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
+ except x509.ExtensionNotFound:
+ # No such extension, return the empty list.
+ return []
+ except (
+ x509.DuplicateExtension,
+ UnsupportedExtension,
+ x509.UnsupportedGeneralNameType,
+ UnicodeError,
+ ) as e:
+ # A problem has been found with the quality of the certificate. Assume
+ # no SAN field is present.
+ log.warning(
+ "A problem was encountered with the certificate that prevented "
+ "urllib3 from finding the SubjectAlternativeName field. This can "
+ "affect certificate validation. The error was %s",
+ e,
+ )
+ return []
+
+ # We want to return dNSName and iPAddress fields. We need to cast the IPs
+ # back to strings because the match_hostname function wants them as
+ # strings.
+ # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8
+ # decoded. This is pretty frustrating, but that's what the standard library
+ # does with certificates, and so we need to attempt to do the same.
+ # We also want to skip over names which cannot be idna encoded.
+ names = [
+ ("DNS", name)
+ for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName))
+ if name is not None
+ ]
+ names.extend(
+ ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress)
+ )
+
+ return names
+
+
+class WrappedSocket(object):
+ """API-compatibility wrapper for Python OpenSSL's Connection-class.
+
+ Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
+ collector of pypy.
+ """
+
+ def __init__(self, connection, socket, suppress_ragged_eofs=True):
+ self.connection = connection
+ self.socket = socket
+ self.suppress_ragged_eofs = suppress_ragged_eofs
+ self._makefile_refs = 0
+ self._closed = False
+
+ def fileno(self):
+ return self.socket.fileno()
+
+ # Copy-pasted from Python 3.5 source code
+ def _decref_socketios(self):
+ if self._makefile_refs > 0:
+ self._makefile_refs -= 1
+ if self._closed:
+ self.close()
+
+ def recv(self, *args, **kwargs):
+ try:
+ data = self.connection.recv(*args, **kwargs)
+ except OpenSSL.SSL.SysCallError as e:
+ if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
+ return b""
+ else:
+ raise SocketError(str(e))
+ except OpenSSL.SSL.ZeroReturnError:
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+ return b""
+ else:
+ raise
+ except OpenSSL.SSL.WantReadError:
+ if not util.wait_for_read(self.socket, self.socket.gettimeout()):
+ raise timeout("The read operation timed out")
+ else:
+ return self.recv(*args, **kwargs)
+
+ # TLS 1.3 post-handshake authentication
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError("read error: %r" % e)
+ else:
+ return data
+
+ def recv_into(self, *args, **kwargs):
+ try:
+ return self.connection.recv_into(*args, **kwargs)
+ except OpenSSL.SSL.SysCallError as e:
+ if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
+ return 0
+ else:
+ raise SocketError(str(e))
+ except OpenSSL.SSL.ZeroReturnError:
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+ return 0
+ else:
+ raise
+ except OpenSSL.SSL.WantReadError:
+ if not util.wait_for_read(self.socket, self.socket.gettimeout()):
+ raise timeout("The read operation timed out")
+ else:
+ return self.recv_into(*args, **kwargs)
+
+ # TLS 1.3 post-handshake authentication
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError("read error: %r" % e)
+
+ def settimeout(self, timeout):
+ return self.socket.settimeout(timeout)
+
+ def _send_until_done(self, data):
+ while True:
+ try:
+ return self.connection.send(data)
+ except OpenSSL.SSL.WantWriteError:
+ if not util.wait_for_write(self.socket, self.socket.gettimeout()):
+ raise timeout()
+ continue
+ except OpenSSL.SSL.SysCallError as e:
+ raise SocketError(str(e))
+
+ def sendall(self, data):
+ total_sent = 0
+ while total_sent < len(data):
+ sent = self._send_until_done(
+ data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]
+ )
+ total_sent += sent
+
+ def shutdown(self):
+ # FIXME rethrow compatible exceptions should we ever use this
+ self.connection.shutdown()
+
+ def close(self):
+ if self._makefile_refs < 1:
+ try:
+ self._closed = True
+ return self.connection.close()
+ except OpenSSL.SSL.Error:
+ return
+ else:
+ self._makefile_refs -= 1
+
+ def getpeercert(self, binary_form=False):
+ x509 = self.connection.get_peer_certificate()
+
+ if not x509:
+ return x509
+
+ if binary_form:
+ return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509)
+
+ return {
+ "subject": ((("commonName", x509.get_subject().CN),),),
+ "subjectAltName": get_subj_alt_name(x509),
+ }
+
+ def version(self):
+ return self.connection.get_protocol_version_name()
+
+ def _reuse(self):
+ self._makefile_refs += 1
+
+ def _drop(self):
+ if self._makefile_refs < 1:
+ self.close()
+ else:
+ self._makefile_refs -= 1
+
+
+if _fileobject: # Platform-specific: Python 2
+
+ def makefile(self, mode, bufsize=-1):
+ self._makefile_refs += 1
+ return _fileobject(self, mode, bufsize, close=True)
+
+
+else: # Platform-specific: Python 3
+ makefile = backport_makefile
+
+WrappedSocket.makefile = makefile
+
+
+class PyOpenSSLContext(object):
+ """
+ I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
+ for translating the interface of the standard library ``SSLContext`` object
+ to calls into PyOpenSSL.
+ """
+
+ def __init__(self, protocol):
+ self.protocol = _openssl_versions[protocol]
+ self._ctx = OpenSSL.SSL.Context(self.protocol)
+ self._options = 0
+ self.check_hostname = False
+
+ @property
+ def options(self):
+ return self._options
+
+ @options.setter
+ def options(self, value):
+ self._options = value
+ self._ctx.set_options(value)
+
+ @property
+ def verify_mode(self):
+ return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]
+
+ @verify_mode.setter
+ def verify_mode(self, value):
+ self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback)
+
+ def set_default_verify_paths(self):
+ self._ctx.set_default_verify_paths()
+
+ def set_ciphers(self, ciphers):
+ if isinstance(ciphers, six.text_type):
+ ciphers = ciphers.encode("utf-8")
+ self._ctx.set_cipher_list(ciphers)
+
+ def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+ if cafile is not None:
+ cafile = cafile.encode("utf-8")
+ if capath is not None:
+ capath = capath.encode("utf-8")
+ try:
+ self._ctx.load_verify_locations(cafile, capath)
+ if cadata is not None:
+ self._ctx.load_verify_locations(BytesIO(cadata))
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError("unable to load trusted certificates: %r" % e)
+
+ def load_cert_chain(self, certfile, keyfile=None, password=None):
+ self._ctx.use_certificate_chain_file(certfile)
+ if password is not None:
+ if not isinstance(password, six.binary_type):
+ password = password.encode("utf-8")
+ self._ctx.set_passwd_cb(lambda *_: password)
+ self._ctx.use_privatekey_file(keyfile or certfile)
+
+ def set_alpn_protocols(self, protocols):
+ protocols = [six.ensure_binary(p) for p in protocols]
+ return self._ctx.set_alpn_protos(protocols)
+
+ def wrap_socket(
+ self,
+ sock,
+ server_side=False,
+ do_handshake_on_connect=True,
+ suppress_ragged_eofs=True,
+ server_hostname=None,
+ ):
+ cnx = OpenSSL.SSL.Connection(self._ctx, sock)
+
+ if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3
+ server_hostname = server_hostname.encode("utf-8")
+
+ if server_hostname is not None:
+ cnx.set_tlsext_host_name(server_hostname)
+
+ cnx.set_connect_state()
+
+ while True:
+ try:
+ cnx.do_handshake()
+ except OpenSSL.SSL.WantReadError:
+ if not util.wait_for_read(sock, sock.gettimeout()):
+ raise timeout("select timed out")
+ continue
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError("bad handshake: %r" % e)
+ break
+
+ return WrappedSocket(cnx, sock)
+
+
+def _verify_callback(cnx, x509, err_no, err_depth, return_code):
+ return err_no == 0
diff --git a/openpype/hosts/fusion/vendor/urllib3/contrib/securetransport.py b/openpype/hosts/fusion/vendor/urllib3/contrib/securetransport.py
new file mode 100644
index 0000000000..554c015fed
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/contrib/securetransport.py
@@ -0,0 +1,922 @@
+"""
+SecureTransport support for urllib3 via ctypes.
+
+This makes platform-native TLS available to urllib3 users on macOS without the
+use of a compiler. This is an important feature because the Python Package
+Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
+that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
+this is to give macOS users an alternative solution to the problem, and that
+solution is to use SecureTransport.
+
+We use ctypes here because this solution must not require a compiler. That's
+because pip is not allowed to require a compiler either.
+
+This is not intended to be a seriously long-term solution to this problem.
+The hope is that PEP 543 will eventually solve this issue for us, at which
+point we can retire this contrib module. But in the short term, we need to
+solve the impending tire fire that is Python on Mac without this kind of
+contrib module. So...here we are.
+
+To use this module, simply import and inject it::
+
+ import urllib3.contrib.securetransport
+ urllib3.contrib.securetransport.inject_into_urllib3()
+
+Happy TLSing!
+
+This code is a bastardised version of the code found in Will Bond's oscrypto
+library. An enormous debt is owed to him for blazing this trail for us. For
+that reason, this code should be considered to be covered both by urllib3's
+license and by oscrypto's:
+
+.. code-block::
+
+ Copyright (c) 2015-2016 Will Bond
+
+ Permission is hereby granted, free of charge, to any person obtaining a
+ copy of this software and associated documentation files (the "Software"),
+ to deal in the Software without restriction, including without limitation
+ the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ and/or sell copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS IN THE SOFTWARE.
+"""
+from __future__ import absolute_import
+
+import contextlib
+import ctypes
+import errno
+import os.path
+import shutil
+import socket
+import ssl
+import struct
+import threading
+import weakref
+
+import six
+
+from .. import util
+from ..util.ssl_ import PROTOCOL_TLS_CLIENT
+from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
+from ._securetransport.low_level import (
+ _assert_no_error,
+ _build_tls_unknown_ca_alert,
+ _cert_array_from_pem,
+ _create_cfstring_array,
+ _load_client_cert_chain,
+ _temporary_keychain,
+)
+
+try: # Platform-specific: Python 2
+ from socket import _fileobject
+except ImportError: # Platform-specific: Python 3
+ _fileobject = None
+ from ..packages.backports.makefile import backport_makefile
+
+__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
+
+# SNI always works
+HAS_SNI = True
+
+orig_util_HAS_SNI = util.HAS_SNI
+orig_util_SSLContext = util.ssl_.SSLContext
+
+# This dictionary is used by the read callback to obtain a handle to the
+# calling wrapped socket. This is a pretty silly approach, but for now it'll
+# do. I feel like I should be able to smuggle a handle to the wrapped socket
+# directly in the SSLConnectionRef, but for now this approach will work I
+# guess.
+#
+# We need to lock around this structure for inserts, but we don't do it for
+# reads/writes in the callbacks. The reasoning here goes as follows:
+#
+# 1. It is not possible to call into the callbacks before the dictionary is
+# populated, so once in the callback the id must be in the dictionary.
+# 2. The callbacks don't mutate the dictionary, they only read from it, and
+# so cannot conflict with any of the insertions.
+#
+# This is good: if we had to lock in the callbacks we'd drastically slow down
+# the performance of this code.
+_connection_refs = weakref.WeakValueDictionary()
+_connection_ref_lock = threading.Lock()
+
+# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over
+# for no better reason than we need *a* limit, and this one is right there.
+SSL_WRITE_BLOCKSIZE = 16384
+
+# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
+# individual cipher suites. We need to do this because this is how
+# SecureTransport wants them.
+CIPHER_SUITES = [
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
+ SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
+ SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
+ SecurityConst.TLS_AES_256_GCM_SHA384,
+ SecurityConst.TLS_AES_128_GCM_SHA256,
+ SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
+ SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
+ SecurityConst.TLS_AES_128_CCM_8_SHA256,
+ SecurityConst.TLS_AES_128_CCM_SHA256,
+ SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
+ SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
+ SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
+ SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
+]
+
+# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
+# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
+# TLSv1 to 1.2 are supported on macOS 10.8+
+_protocol_to_min_max = {
+ util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
+ PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
+}
+
+if hasattr(ssl, "PROTOCOL_SSLv2"):
+ _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
+ SecurityConst.kSSLProtocol2,
+ SecurityConst.kSSLProtocol2,
+ )
+if hasattr(ssl, "PROTOCOL_SSLv3"):
+ _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
+ SecurityConst.kSSLProtocol3,
+ SecurityConst.kSSLProtocol3,
+ )
+if hasattr(ssl, "PROTOCOL_TLSv1"):
+ _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
+ SecurityConst.kTLSProtocol1,
+ SecurityConst.kTLSProtocol1,
+ )
+if hasattr(ssl, "PROTOCOL_TLSv1_1"):
+ _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
+ SecurityConst.kTLSProtocol11,
+ SecurityConst.kTLSProtocol11,
+ )
+if hasattr(ssl, "PROTOCOL_TLSv1_2"):
+ _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
+ SecurityConst.kTLSProtocol12,
+ SecurityConst.kTLSProtocol12,
+ )
+
+
+def inject_into_urllib3():
+ """
+ Monkey-patch urllib3 with SecureTransport-backed SSL-support.
+ """
+ util.SSLContext = SecureTransportContext
+ util.ssl_.SSLContext = SecureTransportContext
+ util.HAS_SNI = HAS_SNI
+ util.ssl_.HAS_SNI = HAS_SNI
+ util.IS_SECURETRANSPORT = True
+ util.ssl_.IS_SECURETRANSPORT = True
+
+
+def extract_from_urllib3():
+ """
+ Undo monkey-patching by :func:`inject_into_urllib3`.
+ """
+ util.SSLContext = orig_util_SSLContext
+ util.ssl_.SSLContext = orig_util_SSLContext
+ util.HAS_SNI = orig_util_HAS_SNI
+ util.ssl_.HAS_SNI = orig_util_HAS_SNI
+ util.IS_SECURETRANSPORT = False
+ util.ssl_.IS_SECURETRANSPORT = False
+
+
+def _read_callback(connection_id, data_buffer, data_length_pointer):
+ """
+ SecureTransport read callback. This is called by ST to request that data
+ be returned from the socket.
+ """
+ wrapped_socket = None
+ try:
+ wrapped_socket = _connection_refs.get(connection_id)
+ if wrapped_socket is None:
+ return SecurityConst.errSSLInternal
+ base_socket = wrapped_socket.socket
+
+ requested_length = data_length_pointer[0]
+
+ timeout = wrapped_socket.gettimeout()
+ error = None
+ read_count = 0
+
+ try:
+ while read_count < requested_length:
+ if timeout is None or timeout >= 0:
+ if not util.wait_for_read(base_socket, timeout):
+ raise socket.error(errno.EAGAIN, "timed out")
+
+ remaining = requested_length - read_count
+ buffer = (ctypes.c_char * remaining).from_address(
+ data_buffer + read_count
+ )
+ chunk_size = base_socket.recv_into(buffer, remaining)
+ read_count += chunk_size
+ if not chunk_size:
+ if not read_count:
+ return SecurityConst.errSSLClosedGraceful
+ break
+ except (socket.error) as e:
+ error = e.errno
+
+ if error is not None and error != errno.EAGAIN:
+ data_length_pointer[0] = read_count
+ if error == errno.ECONNRESET or error == errno.EPIPE:
+ return SecurityConst.errSSLClosedAbort
+ raise
+
+ data_length_pointer[0] = read_count
+
+ if read_count != requested_length:
+ return SecurityConst.errSSLWouldBlock
+
+ return 0
+ except Exception as e:
+ if wrapped_socket is not None:
+ wrapped_socket._exception = e
+ return SecurityConst.errSSLInternal
+
+
+def _write_callback(connection_id, data_buffer, data_length_pointer):
+ """
+ SecureTransport write callback. This is called by ST to request that data
+ actually be sent on the network.
+ """
+ wrapped_socket = None
+ try:
+ wrapped_socket = _connection_refs.get(connection_id)
+ if wrapped_socket is None:
+ return SecurityConst.errSSLInternal
+ base_socket = wrapped_socket.socket
+
+ bytes_to_write = data_length_pointer[0]
+ data = ctypes.string_at(data_buffer, bytes_to_write)
+
+ timeout = wrapped_socket.gettimeout()
+ error = None
+ sent = 0
+
+ try:
+ while sent < bytes_to_write:
+ if timeout is None or timeout >= 0:
+ if not util.wait_for_write(base_socket, timeout):
+ raise socket.error(errno.EAGAIN, "timed out")
+ chunk_sent = base_socket.send(data)
+ sent += chunk_sent
+
+ # This has some needless copying here, but I'm not sure there's
+ # much value in optimising this data path.
+ data = data[chunk_sent:]
+ except (socket.error) as e:
+ error = e.errno
+
+ if error is not None and error != errno.EAGAIN:
+ data_length_pointer[0] = sent
+ if error == errno.ECONNRESET or error == errno.EPIPE:
+ return SecurityConst.errSSLClosedAbort
+ raise
+
+ data_length_pointer[0] = sent
+
+ if sent != bytes_to_write:
+ return SecurityConst.errSSLWouldBlock
+
+ return 0
+ except Exception as e:
+ if wrapped_socket is not None:
+ wrapped_socket._exception = e
+ return SecurityConst.errSSLInternal
+
+
+# We need to keep these two objects references alive: if they get GC'd while
+# in use then SecureTransport could attempt to call a function that is in freed
+# memory. That would be...uh...bad. Yeah, that's the word. Bad.
+_read_callback_pointer = Security.SSLReadFunc(_read_callback)
+_write_callback_pointer = Security.SSLWriteFunc(_write_callback)
+
+
+class WrappedSocket(object):
+ """
+ API-compatibility wrapper for Python's OpenSSL wrapped socket object.
+
+ Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
+ collector of PyPy.
+ """
+
+ def __init__(self, socket):
+ self.socket = socket
+ self.context = None
+ self._makefile_refs = 0
+ self._closed = False
+ self._exception = None
+ self._keychain = None
+ self._keychain_dir = None
+ self._client_cert_chain = None
+
+ # We save off the previously-configured timeout and then set it to
+ # zero. This is done because we use select and friends to handle the
+ # timeouts, but if we leave the timeout set on the lower socket then
+ # Python will "kindly" call select on that socket again for us. Avoid
+ # that by forcing the timeout to zero.
+ self._timeout = self.socket.gettimeout()
+ self.socket.settimeout(0)
+
+ @contextlib.contextmanager
+ def _raise_on_error(self):
+ """
+ A context manager that can be used to wrap calls that do I/O from
+ SecureTransport. If any of the I/O callbacks hit an exception, this
+ context manager will correctly propagate the exception after the fact.
+ This avoids silently swallowing those exceptions.
+
+ It also correctly forces the socket closed.
+ """
+ self._exception = None
+
+ # We explicitly don't catch around this yield because in the unlikely
+ # event that an exception was hit in the block we don't want to swallow
+ # it.
+ yield
+ if self._exception is not None:
+ exception, self._exception = self._exception, None
+ self.close()
+ raise exception
+
+ def _set_ciphers(self):
+ """
+ Sets up the allowed ciphers. By default this matches the set in
+ util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done
+ custom and doesn't allow changing at this time, mostly because parsing
+ OpenSSL cipher strings is going to be a freaking nightmare.
+ """
+ ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
+ result = Security.SSLSetEnabledCiphers(
+ self.context, ciphers, len(CIPHER_SUITES)
+ )
+ _assert_no_error(result)
+
+ def _set_alpn_protocols(self, protocols):
+ """
+ Sets up the ALPN protocols on the context.
+ """
+ if not protocols:
+ return
+ protocols_arr = _create_cfstring_array(protocols)
+ try:
+ result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
+ _assert_no_error(result)
+ finally:
+ CoreFoundation.CFRelease(protocols_arr)
+
+ def _custom_validate(self, verify, trust_bundle):
+ """
+ Called when we have set custom validation. We do this in two cases:
+ first, when cert validation is entirely disabled; and second, when
+ using a custom trust DB.
+ Raises an SSLError if the connection is not trusted.
+ """
+ # If we disabled cert validation, just say: cool.
+ if not verify:
+ return
+
+ successes = (
+ SecurityConst.kSecTrustResultUnspecified,
+ SecurityConst.kSecTrustResultProceed,
+ )
+ try:
+ trust_result = self._evaluate_trust(trust_bundle)
+ if trust_result in successes:
+ return
+ reason = "error code: %d" % (trust_result,)
+ except Exception as e:
+ # Do not trust on error
+ reason = "exception: %r" % (e,)
+
+ # SecureTransport does not send an alert nor shuts down the connection.
+ rec = _build_tls_unknown_ca_alert(self.version())
+ self.socket.sendall(rec)
+ # close the connection immediately
+ # l_onoff = 1, activate linger
+ # l_linger = 0, linger for 0 seconds
+ opts = struct.pack("ii", 1, 0)
+ self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
+ self.close()
+ raise ssl.SSLError("certificate verify failed, %s" % reason)
+
+ def _evaluate_trust(self, trust_bundle):
+ # We want data in memory, so load it up.
+ if os.path.isfile(trust_bundle):
+ with open(trust_bundle, "rb") as f:
+ trust_bundle = f.read()
+
+ cert_array = None
+ trust = Security.SecTrustRef()
+
+ try:
+ # Get a CFArray that contains the certs we want.
+ cert_array = _cert_array_from_pem(trust_bundle)
+
+ # Ok, now the hard part. We want to get the SecTrustRef that ST has
+ # created for this connection, shove our CAs into it, tell ST to
+ # ignore everything else it knows, and then ask if it can build a
+ # chain. This is a buuuunch of code.
+ result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
+ _assert_no_error(result)
+ if not trust:
+ raise ssl.SSLError("Failed to copy trust reference")
+
+ result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
+ _assert_no_error(result)
+
+ result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
+ _assert_no_error(result)
+
+ trust_result = Security.SecTrustResultType()
+ result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
+ _assert_no_error(result)
+ finally:
+ if trust:
+ CoreFoundation.CFRelease(trust)
+
+ if cert_array is not None:
+ CoreFoundation.CFRelease(cert_array)
+
+ return trust_result.value
+
+ def handshake(
+ self,
+ server_hostname,
+ verify,
+ trust_bundle,
+ min_version,
+ max_version,
+ client_cert,
+ client_key,
+ client_key_passphrase,
+ alpn_protocols,
+ ):
+ """
+ Actually performs the TLS handshake. This is run automatically by
+ wrapped socket, and shouldn't be needed in user code.
+ """
+ # First, we do the initial bits of connection setup. We need to create
+ # a context, set its I/O funcs, and set the connection reference.
+ self.context = Security.SSLCreateContext(
+ None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType
+ )
+ result = Security.SSLSetIOFuncs(
+ self.context, _read_callback_pointer, _write_callback_pointer
+ )
+ _assert_no_error(result)
+
+ # Here we need to compute the handle to use. We do this by taking the
+ # id of self modulo 2**31 - 1. If this is already in the dictionary, we
+ # just keep incrementing by one until we find a free space.
+ with _connection_ref_lock:
+ handle = id(self) % 2147483647
+ while handle in _connection_refs:
+ handle = (handle + 1) % 2147483647
+ _connection_refs[handle] = self
+
+ result = Security.SSLSetConnection(self.context, handle)
+ _assert_no_error(result)
+
+ # If we have a server hostname, we should set that too.
+ if server_hostname:
+ if not isinstance(server_hostname, bytes):
+ server_hostname = server_hostname.encode("utf-8")
+
+ result = Security.SSLSetPeerDomainName(
+ self.context, server_hostname, len(server_hostname)
+ )
+ _assert_no_error(result)
+
+ # Setup the ciphers.
+ self._set_ciphers()
+
+ # Setup the ALPN protocols.
+ self._set_alpn_protocols(alpn_protocols)
+
+ # Set the minimum and maximum TLS versions.
+ result = Security.SSLSetProtocolVersionMin(self.context, min_version)
+ _assert_no_error(result)
+
+ result = Security.SSLSetProtocolVersionMax(self.context, max_version)
+ _assert_no_error(result)
+
+ # If there's a trust DB, we need to use it. We do that by telling
+ # SecureTransport to break on server auth. We also do that if we don't
+ # want to validate the certs at all: we just won't actually do any
+ # authing in that case.
+ if not verify or trust_bundle is not None:
+ result = Security.SSLSetSessionOption(
+ self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True
+ )
+ _assert_no_error(result)
+
+ # If there's a client cert, we need to use it.
+ if client_cert:
+ self._keychain, self._keychain_dir = _temporary_keychain()
+ self._client_cert_chain = _load_client_cert_chain(
+ self._keychain, client_cert, client_key
+ )
+ result = Security.SSLSetCertificate(self.context, self._client_cert_chain)
+ _assert_no_error(result)
+
+ while True:
+ with self._raise_on_error():
+ result = Security.SSLHandshake(self.context)
+
+ if result == SecurityConst.errSSLWouldBlock:
+ raise socket.timeout("handshake timed out")
+ elif result == SecurityConst.errSSLServerAuthCompleted:
+ self._custom_validate(verify, trust_bundle)
+ continue
+ else:
+ _assert_no_error(result)
+ break
+
+ def fileno(self):
+ return self.socket.fileno()
+
+ # Copy-pasted from Python 3.5 source code
+ def _decref_socketios(self):
+ if self._makefile_refs > 0:
+ self._makefile_refs -= 1
+ if self._closed:
+ self.close()
+
+ def recv(self, bufsiz):
+ buffer = ctypes.create_string_buffer(bufsiz)
+ bytes_read = self.recv_into(buffer, bufsiz)
+ data = buffer[:bytes_read]
+ return data
+
+ def recv_into(self, buffer, nbytes=None):
+ # Read short on EOF.
+ if self._closed:
+ return 0
+
+ if nbytes is None:
+ nbytes = len(buffer)
+
+ buffer = (ctypes.c_char * nbytes).from_buffer(buffer)
+ processed_bytes = ctypes.c_size_t(0)
+
+ with self._raise_on_error():
+ result = Security.SSLRead(
+ self.context, buffer, nbytes, ctypes.byref(processed_bytes)
+ )
+
+ # There are some result codes that we want to treat as "not always
+ # errors". Specifically, those are errSSLWouldBlock,
+ # errSSLClosedGraceful, and errSSLClosedNoNotify.
+ if result == SecurityConst.errSSLWouldBlock:
+ # If we didn't process any bytes, then this was just a time out.
+ # However, we can get errSSLWouldBlock in situations when we *did*
+ # read some data, and in those cases we should just read "short"
+ # and return.
+ if processed_bytes.value == 0:
+ # Timed out, no data read.
+ raise socket.timeout("recv timed out")
+ elif result in (
+ SecurityConst.errSSLClosedGraceful,
+ SecurityConst.errSSLClosedNoNotify,
+ ):
+ # The remote peer has closed this connection. We should do so as
+ # well. Note that we don't actually return here because in
+ # principle this could actually be fired along with return data.
+ # It's unlikely though.
+ self.close()
+ else:
+ _assert_no_error(result)
+
+ # Ok, we read and probably succeeded. We should return whatever data
+ # was actually read.
+ return processed_bytes.value
+
+ def settimeout(self, timeout):
+ self._timeout = timeout
+
+ def gettimeout(self):
+ return self._timeout
+
+ def send(self, data):
+ processed_bytes = ctypes.c_size_t(0)
+
+ with self._raise_on_error():
+ result = Security.SSLWrite(
+ self.context, data, len(data), ctypes.byref(processed_bytes)
+ )
+
+ if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0:
+ # Timed out
+ raise socket.timeout("send timed out")
+ else:
+ _assert_no_error(result)
+
+ # We sent, and probably succeeded. Tell them how much we sent.
+ return processed_bytes.value
+
+ def sendall(self, data):
+ total_sent = 0
+ while total_sent < len(data):
+ sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE])
+ total_sent += sent
+
+ def shutdown(self):
+ with self._raise_on_error():
+ Security.SSLClose(self.context)
+
+ def close(self):
+ # TODO: should I do clean shutdown here? Do I have to?
+ if self._makefile_refs < 1:
+ self._closed = True
+ if self.context:
+ CoreFoundation.CFRelease(self.context)
+ self.context = None
+ if self._client_cert_chain:
+ CoreFoundation.CFRelease(self._client_cert_chain)
+ self._client_cert_chain = None
+ if self._keychain:
+ Security.SecKeychainDelete(self._keychain)
+ CoreFoundation.CFRelease(self._keychain)
+ shutil.rmtree(self._keychain_dir)
+ self._keychain = self._keychain_dir = None
+ return self.socket.close()
+ else:
+ self._makefile_refs -= 1
+
+ def getpeercert(self, binary_form=False):
+ # Urgh, annoying.
+ #
+ # Here's how we do this:
+ #
+ # 1. Call SSLCopyPeerTrust to get hold of the trust object for this
+ # connection.
+ # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf.
+ # 3. To get the CN, call SecCertificateCopyCommonName and process that
+ # string so that it's of the appropriate type.
+ # 4. To get the SAN, we need to do something a bit more complex:
+ # a. Call SecCertificateCopyValues to get the data, requesting
+ # kSecOIDSubjectAltName.
+ # b. Mess about with this dictionary to try to get the SANs out.
+ #
+ # This is gross. Really gross. It's going to be a few hundred LoC extra
+ # just to repeat something that SecureTransport can *already do*. So my
+ # operating assumption at this time is that what we want to do is
+ # instead to just flag to urllib3 that it shouldn't do its own hostname
+ # validation when using SecureTransport.
+ if not binary_form:
+ raise ValueError("SecureTransport only supports dumping binary certs")
+ trust = Security.SecTrustRef()
+ certdata = None
+ der_bytes = None
+
+ try:
+ # Grab the trust store.
+ result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
+ _assert_no_error(result)
+ if not trust:
+ # Probably we haven't done the handshake yet. No biggie.
+ return None
+
+ cert_count = Security.SecTrustGetCertificateCount(trust)
+ if not cert_count:
+ # Also a case that might happen if we haven't handshaked.
+ # Handshook? Handshaken?
+ return None
+
+ leaf = Security.SecTrustGetCertificateAtIndex(trust, 0)
+ assert leaf
+
+ # Ok, now we want the DER bytes.
+ certdata = Security.SecCertificateCopyData(leaf)
+ assert certdata
+
+ data_length = CoreFoundation.CFDataGetLength(certdata)
+ data_buffer = CoreFoundation.CFDataGetBytePtr(certdata)
+ der_bytes = ctypes.string_at(data_buffer, data_length)
+ finally:
+ if certdata:
+ CoreFoundation.CFRelease(certdata)
+ if trust:
+ CoreFoundation.CFRelease(trust)
+
+ return der_bytes
+
+ def version(self):
+ protocol = Security.SSLProtocol()
+ result = Security.SSLGetNegotiatedProtocolVersion(
+ self.context, ctypes.byref(protocol)
+ )
+ _assert_no_error(result)
+ if protocol.value == SecurityConst.kTLSProtocol13:
+ raise ssl.SSLError("SecureTransport does not support TLS 1.3")
+ elif protocol.value == SecurityConst.kTLSProtocol12:
+ return "TLSv1.2"
+ elif protocol.value == SecurityConst.kTLSProtocol11:
+ return "TLSv1.1"
+ elif protocol.value == SecurityConst.kTLSProtocol1:
+ return "TLSv1"
+ elif protocol.value == SecurityConst.kSSLProtocol3:
+ return "SSLv3"
+ elif protocol.value == SecurityConst.kSSLProtocol2:
+ return "SSLv2"
+ else:
+ raise ssl.SSLError("Unknown TLS version: %r" % protocol)
+
+ def _reuse(self):
+ self._makefile_refs += 1
+
+ def _drop(self):
+ if self._makefile_refs < 1:
+ self.close()
+ else:
+ self._makefile_refs -= 1
+
+
+if _fileobject: # Platform-specific: Python 2
+
+ def makefile(self, mode, bufsize=-1):
+ self._makefile_refs += 1
+ return _fileobject(self, mode, bufsize, close=True)
+
+
+else: # Platform-specific: Python 3
+
+ def makefile(self, mode="r", buffering=None, *args, **kwargs):
+ # We disable buffering with SecureTransport because it conflicts with
+ # the buffering that ST does internally (see issue #1153 for more).
+ buffering = 0
+ return backport_makefile(self, mode, buffering, *args, **kwargs)
+
+
+WrappedSocket.makefile = makefile
+
+
+class SecureTransportContext(object):
+ """
+ I am a wrapper class for the SecureTransport library, to translate the
+ interface of the standard library ``SSLContext`` object to calls into
+ SecureTransport.
+ """
+
+ def __init__(self, protocol):
+ self._min_version, self._max_version = _protocol_to_min_max[protocol]
+ self._options = 0
+ self._verify = False
+ self._trust_bundle = None
+ self._client_cert = None
+ self._client_key = None
+ self._client_key_passphrase = None
+ self._alpn_protocols = None
+
+ @property
+ def check_hostname(self):
+ """
+ SecureTransport cannot have its hostname checking disabled. For more,
+ see the comment on getpeercert() in this file.
+ """
+ return True
+
+ @check_hostname.setter
+ def check_hostname(self, value):
+ """
+ SecureTransport cannot have its hostname checking disabled. For more,
+ see the comment on getpeercert() in this file.
+ """
+ pass
+
+ @property
+ def options(self):
+ # TODO: Well, crap.
+ #
+ # So this is the bit of the code that is the most likely to cause us
+ # trouble. Essentially we need to enumerate all of the SSL options that
+ # users might want to use and try to see if we can sensibly translate
+ # them, or whether we should just ignore them.
+ return self._options
+
+ @options.setter
+ def options(self, value):
+ # TODO: Update in line with above.
+ self._options = value
+
+ @property
+ def verify_mode(self):
+ return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE
+
+ @verify_mode.setter
+ def verify_mode(self, value):
+ self._verify = True if value == ssl.CERT_REQUIRED else False
+
+ def set_default_verify_paths(self):
+ # So, this has to do something a bit weird. Specifically, what it does
+ # is nothing.
+ #
+ # This means that, if we had previously had load_verify_locations
+ # called, this does not undo that. We need to do that because it turns
+ # out that the rest of the urllib3 code will attempt to load the
+ # default verify paths if it hasn't been told about any paths, even if
+ # the context itself was configured sometime earlier. We resolve that by just
+ # ignoring it.
+ pass
+
+ def load_default_certs(self):
+ return self.set_default_verify_paths()
+
+ def set_ciphers(self, ciphers):
+ # For now, we just require the default cipher string.
+ if ciphers != util.ssl_.DEFAULT_CIPHERS:
+ raise ValueError("SecureTransport doesn't support custom cipher strings")
+
+ def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+ # OK, we only really support cadata and cafile.
+ if capath is not None:
+ raise ValueError("SecureTransport does not support cert directories")
+
+ # Raise if cafile does not exist.
+ if cafile is not None:
+ with open(cafile):
+ pass
+
+ self._trust_bundle = cafile or cadata
+
+ def load_cert_chain(self, certfile, keyfile=None, password=None):
+ self._client_cert = certfile
+ self._client_key = keyfile
+ self._client_cert_passphrase = password
+
+ def set_alpn_protocols(self, protocols):
+ """
+ Sets the ALPN protocols that will later be set on the context.
+
+ Raises a NotImplementedError if ALPN is not supported.
+ """
+ if not hasattr(Security, "SSLSetALPNProtocols"):
+ raise NotImplementedError(
+ "SecureTransport supports ALPN only in macOS 10.12+"
+ )
+ self._alpn_protocols = [six.ensure_binary(p) for p in protocols]
+
+ def wrap_socket(
+ self,
+ sock,
+ server_side=False,
+ do_handshake_on_connect=True,
+ suppress_ragged_eofs=True,
+ server_hostname=None,
+ ):
+ # So, what do we do here? Firstly, we assert some properties. This is a
+ # stripped down shim, so there is some functionality we don't support.
+ # See PEP 543 for the real deal.
+ assert not server_side
+ assert do_handshake_on_connect
+ assert suppress_ragged_eofs
+
+ # Ok, we're good to go. Now we want to create the wrapped socket object
+ # and store it in the appropriate place.
+ wrapped_socket = WrappedSocket(sock)
+
+ # Now we can handshake
+ wrapped_socket.handshake(
+ server_hostname,
+ self._verify,
+ self._trust_bundle,
+ self._min_version,
+ self._max_version,
+ self._client_cert,
+ self._client_key,
+ self._client_key_passphrase,
+ self._alpn_protocols,
+ )
+ return wrapped_socket
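The two module-level functions near the top of this file, `inject_into_urllib3()` and `extract_from_urllib3()`, are the public entry points: they swap urllib3's `SSLContext` for the `SecureTransportContext` defined above and back again. A minimal usage sketch, assuming macOS and the plain `urllib3` import path rather than the vendored `openpype.hosts.fusion.vendor.urllib3` one:

```python
import urllib3
from urllib3.contrib import securetransport  # macOS-only backend

# Monkey-patch urllib3 so new connections are backed by SecureTransport.
securetransport.inject_into_urllib3()
try:
    http = urllib3.PoolManager()
    resp = http.request("GET", "https://example.com/")
    print(resp.status)
finally:
    # Restore the stock OpenSSL-backed SSLContext.
    securetransport.extract_from_urllib3()
```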
diff --git a/openpype/hosts/fusion/vendor/urllib3/contrib/socks.py b/openpype/hosts/fusion/vendor/urllib3/contrib/socks.py
new file mode 100644
index 0000000000..c326e80dd1
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/contrib/socks.py
@@ -0,0 +1,216 @@
+# -*- coding: utf-8 -*-
+"""
+This module contains provisional support for SOCKS proxies from within
+urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
+SOCKS5. To enable its functionality, either install PySocks or install this
+module with the ``socks`` extra.
+
+The SOCKS implementation supports the full range of urllib3 features. It also
+supports the following SOCKS features:
+
+- SOCKS4A (``proxy_url='socks4a://...``)
+- SOCKS4 (``proxy_url='socks4://...``)
+- SOCKS5 with remote DNS (``proxy_url='socks5h://...``)
+- SOCKS5 with local DNS (``proxy_url='socks5://...``)
+- Usernames and passwords for the SOCKS proxy
+
+.. note::
+ It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
+ your ``proxy_url`` to ensure that DNS resolution is done from the remote
+ server instead of client-side when connecting to a domain name.
+
+SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
+supports IPv4, IPv6, and domain names.
+
+When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
+will be sent as the ``userid`` section of the SOCKS request:
+
+.. code-block:: python
+
+    proxy_url="socks4a://<username>@proxy-host"
+
+When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
+of the ``proxy_url`` will be sent as the username/password to authenticate
+with the proxy:
+
+.. code-block:: python
+
+    proxy_url="socks5h://<username>:<password>@proxy-host"
+
+"""
+from __future__ import absolute_import
+
+try:
+ import socks
+except ImportError:
+ import warnings
+
+ from ..exceptions import DependencyWarning
+
+ warnings.warn(
+ (
+ "SOCKS support in urllib3 requires the installation of optional "
+ "dependencies: specifically, PySocks. For more information, see "
+ "https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies"
+ ),
+ DependencyWarning,
+ )
+ raise
+
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
+
+from ..connection import HTTPConnection, HTTPSConnection
+from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from ..exceptions import ConnectTimeoutError, NewConnectionError
+from ..poolmanager import PoolManager
+from ..util.url import parse_url
+
+try:
+ import ssl
+except ImportError:
+ ssl = None
+
+
+class SOCKSConnection(HTTPConnection):
+ """
+ A plain-text HTTP connection that connects via a SOCKS proxy.
+ """
+
+ def __init__(self, *args, **kwargs):
+ self._socks_options = kwargs.pop("_socks_options")
+ super(SOCKSConnection, self).__init__(*args, **kwargs)
+
+ def _new_conn(self):
+ """
+ Establish a new connection via the SOCKS proxy.
+ """
+ extra_kw = {}
+ if self.source_address:
+ extra_kw["source_address"] = self.source_address
+
+ if self.socket_options:
+ extra_kw["socket_options"] = self.socket_options
+
+ try:
+ conn = socks.create_connection(
+ (self.host, self.port),
+ proxy_type=self._socks_options["socks_version"],
+ proxy_addr=self._socks_options["proxy_host"],
+ proxy_port=self._socks_options["proxy_port"],
+ proxy_username=self._socks_options["username"],
+ proxy_password=self._socks_options["password"],
+ proxy_rdns=self._socks_options["rdns"],
+ timeout=self.timeout,
+ **extra_kw
+ )
+
+ except SocketTimeout:
+ raise ConnectTimeoutError(
+ self,
+ "Connection to %s timed out. (connect timeout=%s)"
+ % (self.host, self.timeout),
+ )
+
+ except socks.ProxyError as e:
+ # This is fragile as hell, but it seems to be the only way to raise
+ # useful errors here.
+ if e.socket_err:
+ error = e.socket_err
+ if isinstance(error, SocketTimeout):
+ raise ConnectTimeoutError(
+ self,
+ "Connection to %s timed out. (connect timeout=%s)"
+ % (self.host, self.timeout),
+ )
+ else:
+ raise NewConnectionError(
+ self, "Failed to establish a new connection: %s" % error
+ )
+ else:
+ raise NewConnectionError(
+ self, "Failed to establish a new connection: %s" % e
+ )
+
+ except SocketError as e: # Defensive: PySocks should catch all these.
+ raise NewConnectionError(
+ self, "Failed to establish a new connection: %s" % e
+ )
+
+ return conn
+
+
+# We don't need to duplicate the Verified/Unverified distinction from
+# urllib3/connection.py here because the HTTPSConnection will already have been
+# correctly set to either the Verified or Unverified form by that module. This
+# means the SOCKSHTTPSConnection will automatically be the correct type.
+class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
+ pass
+
+
+class SOCKSHTTPConnectionPool(HTTPConnectionPool):
+ ConnectionCls = SOCKSConnection
+
+
+class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
+ ConnectionCls = SOCKSHTTPSConnection
+
+
+class SOCKSProxyManager(PoolManager):
+ """
+ A version of the urllib3 ProxyManager that routes connections via the
+ defined SOCKS proxy.
+ """
+
+ pool_classes_by_scheme = {
+ "http": SOCKSHTTPConnectionPool,
+ "https": SOCKSHTTPSConnectionPool,
+ }
+
+ def __init__(
+ self,
+ proxy_url,
+ username=None,
+ password=None,
+ num_pools=10,
+ headers=None,
+ **connection_pool_kw
+ ):
+ parsed = parse_url(proxy_url)
+
+ if username is None and password is None and parsed.auth is not None:
+ split = parsed.auth.split(":")
+ if len(split) == 2:
+ username, password = split
+ if parsed.scheme == "socks5":
+ socks_version = socks.PROXY_TYPE_SOCKS5
+ rdns = False
+ elif parsed.scheme == "socks5h":
+ socks_version = socks.PROXY_TYPE_SOCKS5
+ rdns = True
+ elif parsed.scheme == "socks4":
+ socks_version = socks.PROXY_TYPE_SOCKS4
+ rdns = False
+ elif parsed.scheme == "socks4a":
+ socks_version = socks.PROXY_TYPE_SOCKS4
+ rdns = True
+ else:
+ raise ValueError("Unable to determine SOCKS version from %s" % proxy_url)
+
+ self.proxy_url = proxy_url
+
+ socks_options = {
+ "socks_version": socks_version,
+ "proxy_host": parsed.host,
+ "proxy_port": parsed.port,
+ "username": username,
+ "password": password,
+ "rdns": rdns,
+ }
+ connection_pool_kw["_socks_options"] = socks_options
+
+ super(SOCKSProxyManager, self).__init__(
+ num_pools, headers, **connection_pool_kw
+ )
+
+ self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
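`SOCKSProxyManager` is used like a regular `PoolManager`; the scheme of the proxy URL selects the SOCKS version and DNS behaviour as decoded above. A short sketch, assuming PySocks is installed and using a placeholder proxy endpoint (again via the plain `urllib3` import path):

```python
from urllib3.contrib.socks import SOCKSProxyManager

# socks5h:// resolves DNS on the proxy side; credentials are optional
# and may also be passed as username=/password= keyword arguments.
proxy = SOCKSProxyManager("socks5h://user:pass@127.0.0.1:1080")
resp = proxy.request("GET", "https://example.com/")
print(resp.status)
```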
diff --git a/openpype/hosts/fusion/vendor/urllib3/exceptions.py b/openpype/hosts/fusion/vendor/urllib3/exceptions.py
new file mode 100644
index 0000000000..cba6f3f560
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/exceptions.py
@@ -0,0 +1,323 @@
+from __future__ import absolute_import
+
+from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
+
+# Base Exceptions
+
+
+class HTTPError(Exception):
+ """Base exception used by this module."""
+
+ pass
+
+
+class HTTPWarning(Warning):
+ """Base warning used by this module."""
+
+ pass
+
+
+class PoolError(HTTPError):
+ """Base exception for errors caused within a pool."""
+
+ def __init__(self, pool, message):
+ self.pool = pool
+ HTTPError.__init__(self, "%s: %s" % (pool, message))
+
+ def __reduce__(self):
+ # For pickling purposes.
+ return self.__class__, (None, None)
+
+
+class RequestError(PoolError):
+ """Base exception for PoolErrors that have associated URLs."""
+
+ def __init__(self, pool, url, message):
+ self.url = url
+ PoolError.__init__(self, pool, message)
+
+ def __reduce__(self):
+ # For pickling purposes.
+ return self.__class__, (None, self.url, None)
+
+
+class SSLError(HTTPError):
+ """Raised when SSL certificate fails in an HTTPS connection."""
+
+ pass
+
+
+class ProxyError(HTTPError):
+ """Raised when the connection to a proxy fails."""
+
+ def __init__(self, message, error, *args):
+ super(ProxyError, self).__init__(message, error, *args)
+ self.original_error = error
+
+
+class DecodeError(HTTPError):
+ """Raised when automatic decoding based on Content-Type fails."""
+
+ pass
+
+
+class ProtocolError(HTTPError):
+ """Raised when something unexpected happens mid-request/response."""
+
+ pass
+
+
+#: Renamed to ProtocolError but aliased for backwards compatibility.
+ConnectionError = ProtocolError
+
+
+# Leaf Exceptions
+
+
+class MaxRetryError(RequestError):
+ """Raised when the maximum number of retries is exceeded.
+
+ :param pool: The connection pool
+ :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
+ :param string url: The requested Url
+ :param exceptions.Exception reason: The underlying error
+
+ """
+
+ def __init__(self, pool, url, reason=None):
+ self.reason = reason
+
+ message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason)
+
+ RequestError.__init__(self, pool, url, message)
+
+
+class HostChangedError(RequestError):
+ """Raised when an existing pool gets a request for a foreign host."""
+
+ def __init__(self, pool, url, retries=3):
+ message = "Tried to open a foreign host with url: %s" % url
+ RequestError.__init__(self, pool, url, message)
+ self.retries = retries
+
+
+class TimeoutStateError(HTTPError):
+ """Raised when passing an invalid state to a timeout"""
+
+ pass
+
+
+class TimeoutError(HTTPError):
+ """Raised when a socket timeout error occurs.
+
+ Catching this error will catch both :exc:`ReadTimeoutErrors
+ <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
+ """
+
+ pass
+
+
+class ReadTimeoutError(TimeoutError, RequestError):
+ """Raised when a socket timeout occurs while receiving data from a server"""
+
+ pass
+
+
+# This timeout error does not have a URL attached and needs to inherit from the
+# base HTTPError
+class ConnectTimeoutError(TimeoutError):
+ """Raised when a socket timeout occurs while connecting to a server"""
+
+ pass
+
+
+class NewConnectionError(ConnectTimeoutError, PoolError):
+ """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
+
+ pass
+
+
+class EmptyPoolError(PoolError):
+ """Raised when a pool runs out of connections and no more are allowed."""
+
+ pass
+
+
+class ClosedPoolError(PoolError):
+ """Raised when a request enters a pool after the pool has been closed."""
+
+ pass
+
+
+class LocationValueError(ValueError, HTTPError):
+ """Raised when there is something wrong with a given URL input."""
+
+ pass
+
+
+class LocationParseError(LocationValueError):
+ """Raised when get_host or similar fails to parse the URL input."""
+
+ def __init__(self, location):
+ message = "Failed to parse: %s" % location
+ HTTPError.__init__(self, message)
+
+ self.location = location
+
+
+class URLSchemeUnknown(LocationValueError):
+ """Raised when a URL input has an unsupported scheme."""
+
+ def __init__(self, scheme):
+ message = "Not supported URL scheme %s" % scheme
+ super(URLSchemeUnknown, self).__init__(message)
+
+ self.scheme = scheme
+
+
+class ResponseError(HTTPError):
+ """Used as a container for an error reason supplied in a MaxRetryError."""
+
+ GENERIC_ERROR = "too many error responses"
+ SPECIFIC_ERROR = "too many {status_code} error responses"
+
+
+class SecurityWarning(HTTPWarning):
+ """Warned when performing security reducing actions"""
+
+ pass
+
+
+class SubjectAltNameWarning(SecurityWarning):
+ """Warned when connecting to a host with a certificate missing a SAN."""
+
+ pass
+
+
+class InsecureRequestWarning(SecurityWarning):
+ """Warned when making an unverified HTTPS request."""
+
+ pass
+
+
+class SystemTimeWarning(SecurityWarning):
+ """Warned when system time is suspected to be wrong"""
+
+ pass
+
+
+class InsecurePlatformWarning(SecurityWarning):
+ """Warned when certain TLS/SSL configuration is not available on a platform."""
+
+ pass
+
+
+class SNIMissingWarning(HTTPWarning):
+ """Warned when making a HTTPS request without SNI available."""
+
+ pass
+
+
+class DependencyWarning(HTTPWarning):
+ """
+ Warned when an attempt is made to import a module with missing optional
+ dependencies.
+ """
+
+ pass
+
+
+class ResponseNotChunked(ProtocolError, ValueError):
+ """Response needs to be chunked in order to read it as chunks."""
+
+ pass
+
+
+class BodyNotHttplibCompatible(HTTPError):
+ """
+ Body should be :class:`http.client.HTTPResponse` like
+ (have an fp attribute which returns raw chunks) for read_chunked().
+ """
+
+ pass
+
+
+class IncompleteRead(HTTPError, httplib_IncompleteRead):
+ """
+ Response length doesn't match expected Content-Length
+
+ Subclass of :class:`http.client.IncompleteRead` to allow int value
+ for ``partial`` to avoid creating large objects on streamed reads.
+ """
+
+ def __init__(self, partial, expected):
+ super(IncompleteRead, self).__init__(partial, expected)
+
+ def __repr__(self):
+ return "IncompleteRead(%i bytes read, %i more expected)" % (
+ self.partial,
+ self.expected,
+ )
+
+
+class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
+ """Invalid chunk length in a chunked response."""
+
+ def __init__(self, response, length):
+ super(InvalidChunkLength, self).__init__(
+ response.tell(), response.length_remaining
+ )
+ self.response = response
+ self.length = length
+
+ def __repr__(self):
+ return "InvalidChunkLength(got length %r, %i bytes read)" % (
+ self.length,
+ self.partial,
+ )
+
+
+class InvalidHeader(HTTPError):
+ """The header provided was somehow invalid."""
+
+ pass
+
+
+class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
+ """ProxyManager does not support the supplied scheme"""
+
+ # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
+
+ def __init__(self, scheme):
+ # 'localhost' is here because our URL parser parses
+ # localhost:8080 -> scheme=localhost, remove if we fix this.
+ if scheme == "localhost":
+ scheme = None
+ if scheme is None:
+ message = "Proxy URL had no scheme, should start with http:// or https://"
+ else:
+ message = (
+ "Proxy URL had unsupported scheme %s, should use http:// or https://"
+ % scheme
+ )
+ super(ProxySchemeUnknown, self).__init__(message)
+
+
+class ProxySchemeUnsupported(ValueError):
+ """Fetching HTTPS resources through HTTPS proxies is unsupported"""
+
+ pass
+
+
+class HeaderParsingError(HTTPError):
+ """Raised by assert_header_parsing, but we convert it to a log.warning statement."""
+
+ def __init__(self, defects, unparsed_data):
+ message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
+ super(HeaderParsingError, self).__init__(message)
+
+
+class UnrewindableBodyError(HTTPError):
+ """urllib3 encountered an error when trying to rewind a body"""
+
+ pass
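Because these exceptions form a hierarchy rooted at `HTTPError`, callers can catch broadly or narrowly, and `MaxRetryError.reason` exposes the underlying leaf error. A small sketch (the unreachable address is a placeholder):

```python
import urllib3
from urllib3.exceptions import MaxRetryError

http = urllib3.PoolManager()
try:
    # Placeholder endpoint that should refuse connections.
    http.request("GET", "http://127.0.0.1:9/", retries=1, timeout=0.5)
except MaxRetryError as err:
    # err.reason typically holds a NewConnectionError or a timeout error.
    print("gave up:", type(err.reason).__name__, err.reason)
```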
diff --git a/openpype/hosts/fusion/vendor/urllib3/fields.py b/openpype/hosts/fusion/vendor/urllib3/fields.py
new file mode 100644
index 0000000000..9d630f491d
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/fields.py
@@ -0,0 +1,274 @@
+from __future__ import absolute_import
+
+import email.utils
+import mimetypes
+import re
+
+from .packages import six
+
+
+def guess_content_type(filename, default="application/octet-stream"):
+ """
+ Guess the "Content-Type" of a file.
+
+ :param filename:
+ The filename to guess the "Content-Type" of using :mod:`mimetypes`.
+ :param default:
+ If no "Content-Type" can be guessed, default to `default`.
+ """
+ if filename:
+ return mimetypes.guess_type(filename)[0] or default
+ return default
+
+
+def format_header_param_rfc2231(name, value):
+ """
+ Helper function to format and quote a single header parameter using the
+ strategy defined in RFC 2231.
+
+ Particularly useful for header parameters which might contain
+ non-ASCII values, like file names. This follows
+ `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
+
+ :param name:
+ The name of the parameter, a string expected to be ASCII only.
+ :param value:
+ The value of the parameter, provided as ``bytes`` or ``str``.
+ :ret:
+ An RFC-2231-formatted unicode string.
+ """
+ if isinstance(value, six.binary_type):
+ value = value.decode("utf-8")
+
+ if not any(ch in value for ch in '"\\\r\n'):
+ result = u'%s="%s"' % (name, value)
+ try:
+ result.encode("ascii")
+ except (UnicodeEncodeError, UnicodeDecodeError):
+ pass
+ else:
+ return result
+
+ if six.PY2: # Python 2:
+ value = value.encode("utf-8")
+
+ # encode_rfc2231 accepts an encoded string and returns an ascii-encoded
+ # string in Python 2 but accepts and returns unicode strings in Python 3
+ value = email.utils.encode_rfc2231(value, "utf-8")
+ value = "%s*=%s" % (name, value)
+
+ if six.PY2: # Python 2:
+ value = value.decode("utf-8")
+
+ return value
+
+
+_HTML5_REPLACEMENTS = {
+ u"\u0022": u"%22",
+ # Replace "\" with "\\".
+ u"\u005C": u"\u005C\u005C",
+}
+
+# All control characters from 0x00 to 0x1F *except* 0x1B.
+_HTML5_REPLACEMENTS.update(
+ {
+ six.unichr(cc): u"%{:02X}".format(cc)
+ for cc in range(0x00, 0x1F + 1)
+ if cc not in (0x1B,)
+ }
+)
+
+
+def _replace_multiple(value, needles_and_replacements):
+ def replacer(match):
+ return needles_and_replacements[match.group(0)]
+
+ pattern = re.compile(
+ r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()])
+ )
+
+ result = pattern.sub(replacer, value)
+
+ return result
+
+
+def format_header_param_html5(name, value):
+ """
+ Helper function to format and quote a single header parameter using the
+ HTML5 strategy.
+
+ Particularly useful for header parameters which might contain
+ non-ASCII values, like file names. This follows the `HTML5 Working Draft
+ Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
+
+ .. _HTML5 Working Draft Section 4.10.22.7:
+ https://w3c.github.io/html/sec-forms.html#multipart-form-data
+
+ :param name:
+ The name of the parameter, a string expected to be ASCII only.
+ :param value:
+ The value of the parameter, provided as ``bytes`` or ``str``.
+ :ret:
+ A unicode string, stripped of troublesome characters.
+ """
+ if isinstance(value, six.binary_type):
+ value = value.decode("utf-8")
+
+ value = _replace_multiple(value, _HTML5_REPLACEMENTS)
+
+ return u'%s="%s"' % (name, value)
+
+
+# For backwards-compatibility.
+format_header_param = format_header_param_html5
+
+
+class RequestField(object):
+ """
+ A data container for request body parameters.
+
+ :param name:
+ The name of this request field. Must be unicode.
+ :param data:
+ The data/value body.
+ :param filename:
+ An optional filename of the request field. Must be unicode.
+ :param headers:
+ An optional dict-like object of headers to initially use for the field.
+ :param header_formatter:
+ An optional callable that is used to encode and format the headers. By
+ default, this is :func:`format_header_param_html5`.
+ """
+
+ def __init__(
+ self,
+ name,
+ data,
+ filename=None,
+ headers=None,
+ header_formatter=format_header_param_html5,
+ ):
+ self._name = name
+ self._filename = filename
+ self.data = data
+ self.headers = {}
+ if headers:
+ self.headers = dict(headers)
+ self.header_formatter = header_formatter
+
+ @classmethod
+ def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5):
+ """
+ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
+
+ Supports constructing :class:`~urllib3.fields.RequestField` from
+ parameter of key/value strings AND key/filetuple. A filetuple is a
+ (filename, data, MIME type) tuple where the MIME type is optional.
+ For example::
+
+ 'foo': 'bar',
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
+ 'realfile': ('barfile.txt', open('realfile').read()),
+ 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
+ 'nonamefile': 'contents of nonamefile field',
+
+ Field names and filenames must be unicode.
+ """
+ if isinstance(value, tuple):
+ if len(value) == 3:
+ filename, data, content_type = value
+ else:
+ filename, data = value
+ content_type = guess_content_type(filename)
+ else:
+ filename = None
+ content_type = None
+ data = value
+
+ request_param = cls(
+ fieldname, data, filename=filename, header_formatter=header_formatter
+ )
+ request_param.make_multipart(content_type=content_type)
+
+ return request_param
+
+ def _render_part(self, name, value):
+ """
+ Overridable helper function to format a single header parameter. By
+ default, this calls ``self.header_formatter``.
+
+ :param name:
+ The name of the parameter, a string expected to be ASCII only.
+ :param value:
+ The value of the parameter, provided as a unicode string.
+ """
+
+ return self.header_formatter(name, value)
+
+ def _render_parts(self, header_parts):
+ """
+ Helper function to format and quote a single header.
+
+ Useful for single headers that are composed of multiple items. E.g.,
+ 'Content-Disposition' fields.
+
+ :param header_parts:
+ A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
+ as `k1="v1"; k2="v2"; ...`.
+ """
+ parts = []
+ iterable = header_parts
+ if isinstance(header_parts, dict):
+ iterable = header_parts.items()
+
+ for name, value in iterable:
+ if value is not None:
+ parts.append(self._render_part(name, value))
+
+ return u"; ".join(parts)
+
+ def render_headers(self):
+ """
+ Renders the headers for this request field.
+ """
+ lines = []
+
+ sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"]
+ for sort_key in sort_keys:
+ if self.headers.get(sort_key, False):
+ lines.append(u"%s: %s" % (sort_key, self.headers[sort_key]))
+
+ for header_name, header_value in self.headers.items():
+ if header_name not in sort_keys:
+ if header_value:
+ lines.append(u"%s: %s" % (header_name, header_value))
+
+ lines.append(u"\r\n")
+ return u"\r\n".join(lines)
+
+ def make_multipart(
+ self, content_disposition=None, content_type=None, content_location=None
+ ):
+ """
+ Makes this request field into a multipart request field.
+
+ This method sets the "Content-Disposition", "Content-Type" and
+ "Content-Location" headers on the request parameter.
+
+ :param content_type:
+ The 'Content-Type' of the request body.
+ :param content_location:
+ The 'Content-Location' of the request body.
+
+ """
+ self.headers["Content-Disposition"] = content_disposition or u"form-data"
+ self.headers["Content-Disposition"] += u"; ".join(
+ [
+ u"",
+ self._render_parts(
+ ((u"name", self._name), (u"filename", self._filename))
+ ),
+ ]
+ )
+ self.headers["Content-Type"] = content_type
+ self.headers["Content-Location"] = content_location
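A short sketch of the tuple form `from_tuples` accepts and the header block `render_headers` produces (plain `urllib3` import path assumed):

```python
from urllib3.fields import RequestField

# (filename, data, content_type) triple; the content type may be omitted,
# in which case it is guessed from the filename.
field = RequestField.from_tuples(
    "attachment", ("notes.txt", b"hello world", "text/plain")
)
# Prints roughly:
#   Content-Disposition: form-data; name="attachment"; filename="notes.txt"
#   Content-Type: text/plain
print(field.render_headers())
```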
diff --git a/openpype/hosts/fusion/vendor/urllib3/filepost.py b/openpype/hosts/fusion/vendor/urllib3/filepost.py
new file mode 100644
index 0000000000..36c9252c64
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/filepost.py
@@ -0,0 +1,98 @@
+from __future__ import absolute_import
+
+import binascii
+import codecs
+import os
+from io import BytesIO
+
+from .fields import RequestField
+from .packages import six
+from .packages.six import b
+
+writer = codecs.lookup("utf-8")[3]
+
+
+def choose_boundary():
+ """
+ Our embarrassingly-simple replacement for mimetools.choose_boundary.
+ """
+ boundary = binascii.hexlify(os.urandom(16))
+ if not six.PY2:
+ boundary = boundary.decode("ascii")
+ return boundary
+
+
+def iter_field_objects(fields):
+ """
+ Iterate over fields.
+
+ Supports list of (k, v) tuples and dicts, and lists of
+ :class:`~urllib3.fields.RequestField`.
+
+ """
+ if isinstance(fields, dict):
+ i = six.iteritems(fields)
+ else:
+ i = iter(fields)
+
+ for field in i:
+ if isinstance(field, RequestField):
+ yield field
+ else:
+ yield RequestField.from_tuples(*field)
+
+
+def iter_fields(fields):
+ """
+ .. deprecated:: 1.6
+
+ Iterate over fields.
+
+ The addition of :class:`~urllib3.fields.RequestField` makes this function
+ obsolete. Instead, use :func:`iter_field_objects`, which returns
+ :class:`~urllib3.fields.RequestField` objects.
+
+ Supports list of (k, v) tuples and dicts.
+ """
+ if isinstance(fields, dict):
+ return ((k, v) for k, v in six.iteritems(fields))
+
+ return ((k, v) for k, v in fields)
+
+
+def encode_multipart_formdata(fields, boundary=None):
+ """
+ Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
+
+ :param fields:
+ Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
+
+ :param boundary:
+ If not specified, then a random boundary will be generated using
+ :func:`urllib3.filepost.choose_boundary`.
+ """
+ body = BytesIO()
+ if boundary is None:
+ boundary = choose_boundary()
+
+ for field in iter_field_objects(fields):
+ body.write(b("--%s\r\n" % (boundary)))
+
+ writer(body).write(field.render_headers())
+ data = field.data
+
+ if isinstance(data, int):
+ data = str(data) # Backwards compatibility
+
+ if isinstance(data, six.text_type):
+ writer(body).write(data)
+ else:
+ body.write(data)
+
+ body.write(b"\r\n")
+
+ body.write(b("--%s--\r\n" % (boundary)))
+
+ content_type = str("multipart/form-data; boundary=%s" % boundary)
+
+ return body.getvalue(), content_type
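`encode_multipart_formdata` ties the pieces together: each field becomes a `RequestField`, its headers are rendered, and the parts are joined with the boundary. A minimal sketch with a fixed boundary (omit it to get a random one):

```python
from urllib3.filepost import encode_multipart_formdata

body, content_type = encode_multipart_formdata(
    {
        "comment": "release notes",
        "logfile": ("run.log", "line 1\nline 2\n", "text/plain"),
    },
    boundary="example-boundary",
)
print(content_type)          # multipart/form-data; boundary=example-boundary
print(body.decode("utf-8"))  # the full multipart body
```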
diff --git a/openpype/hosts/fusion/vendor/urllib3/packages/__init__.py b/openpype/hosts/fusion/vendor/urllib3/packages/__init__.py
new file mode 100644
index 0000000000..fce4caa65d
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/packages/__init__.py
@@ -0,0 +1,5 @@
+from __future__ import absolute_import
+
+from . import ssl_match_hostname
+
+__all__ = ("ssl_match_hostname",)
diff --git a/openpype/hosts/fusion/vendor/urllib3/packages/backports/__init__.py b/openpype/hosts/fusion/vendor/urllib3/packages/backports/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/openpype/hosts/fusion/vendor/urllib3/packages/backports/makefile.py b/openpype/hosts/fusion/vendor/urllib3/packages/backports/makefile.py
new file mode 100644
index 0000000000..b8fb2154b6
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/packages/backports/makefile.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+"""
+backports.makefile
+~~~~~~~~~~~~~~~~~~
+
+Backports the Python 3 ``socket.makefile`` method for use with anything that
+wants to create a "fake" socket object.
+"""
+import io
+from socket import SocketIO
+
+
+def backport_makefile(
+ self, mode="r", buffering=None, encoding=None, errors=None, newline=None
+):
+ """
+ Backport of ``socket.makefile`` from Python 3.5.
+ """
+ if not set(mode) <= {"r", "w", "b"}:
+ raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
+ writing = "w" in mode
+ reading = "r" in mode or not writing
+ assert reading or writing
+ binary = "b" in mode
+ rawmode = ""
+ if reading:
+ rawmode += "r"
+ if writing:
+ rawmode += "w"
+ raw = SocketIO(self, rawmode)
+ self._makefile_refs += 1
+ if buffering is None:
+ buffering = -1
+ if buffering < 0:
+ buffering = io.DEFAULT_BUFFER_SIZE
+ if buffering == 0:
+ if not binary:
+ raise ValueError("unbuffered streams must be binary")
+ return raw
+ if reading and writing:
+ buffer = io.BufferedRWPair(raw, raw, buffering)
+ elif reading:
+ buffer = io.BufferedReader(raw, buffering)
+ else:
+ assert writing
+ buffer = io.BufferedWriter(raw, buffering)
+ if binary:
+ return buffer
+ text = io.TextIOWrapper(buffer, encoding, errors, newline)
+ text.mode = mode
+ return text
diff --git a/openpype/hosts/fusion/vendor/urllib3/packages/six.py b/openpype/hosts/fusion/vendor/urllib3/packages/six.py
new file mode 100644
index 0000000000..ba50acb062
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/packages/six.py
@@ -0,0 +1,1077 @@
+# Copyright (c) 2010-2020 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.16.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+ string_types = (str,)
+ integer_types = (int,)
+ class_types = (type,)
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = (basestring,)
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+ def __len__(self):
+ return 1 << 31
+
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+if PY34:
+ from importlib.util import spec_from_loader
+else:
+ spec_from_loader = None
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
+ return result
+
+
+class MovedModule(_LazyDescr):
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+ This class implements a PEP302 finder and loader. It should be compatible
+ with Python 2.5 and all existing versions of Python3
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def find_spec(self, fullname, path, target=None):
+ if fullname in self.known_modules:
+ return spec_from_loader(fullname, self)
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+ Return true if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP 451).
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required, if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+
+ get_source = get_code # same as get_code
+
+ def create_module(self, spec):
+ return self.load_module(spec.name)
+
+ def exec_module(self, module):
+ pass
+
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+ """Lazy loading of moved objects"""
+
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute(
+ "filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"
+ ),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("getoutput", "commands", "subprocess"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute(
+ "reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"
+ ),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute(
+ "zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"
+ ),
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule(
+ "collections_abc",
+ "collections",
+ "collections.abc" if sys.version_info >= (3, 3) else "collections",
+ ),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
+ MovedModule(
+ "_dummy_thread",
+ "dummy_thread",
+ "_dummy_thread" if sys.version_info < (3, 9) else "_thread",
+ ),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule(
+ "email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"
+ ),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add Windows-specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
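+# Illustrative usage sketch (not part of six itself): client code accesses
+# moved stdlib names through this lazy namespace, and each attribute is
+# resolved to the right module for the running interpreter on first access.
+# >>> from six.moves import configparser          # ConfigParser on Python 2
+# >>> from six.moves.urllib.parse import urlparse # urlparse on Python 2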
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute(
+ "unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"
+ ),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(
+ Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse",
+ "moves.urllib.parse",
+)
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(
+ Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error",
+ "moves.urllib.error",
+)
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+ MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+ MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(
+ Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request",
+ "moves.urllib.request",
+)
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(
+ Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response",
+ "moves.urllib.response",
+)
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = (
+ _urllib_robotparser_moved_attributes
+)
+
+_importer._add_module(
+ Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser",
+ "moves.urllib.robotparser",
+)
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ["parse", "error", "request", "response", "robotparser"]
+
+
+_importer._add_module(
+ Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib"
+)
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+
+ def advance_iterator(it):
+ return it.next()
+
+
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ def create_unbound_method(func, cls):
+ return func
+
+ Iterator = object
+else:
+
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
+ class Iterator(object):
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(
+ get_unbound_function, """Get the function out of a possibly unbound function"""
+)
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
+else:
+
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
+
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(
+ iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary."
+)
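+# Illustrative example: these helpers give a memory-efficient iteration idiom
+# that works unchanged on both interpreters.
+# >>> for key, value in iteritems({"a": 1, "b": 2}):
+# ...     print(key, value)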
+
+
+if PY3:
+
+ def b(s):
+ return s.encode("latin-1")
+
+ def u(s):
+ return s
+
+ unichr = chr
+ import struct
+
+ int2byte = struct.Struct(">B").pack
+ del struct
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ del io
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
+ _assertNotRegex = "assertNotRegex"
+else:
+
+ def b(s):
+ return s
+
+ # Workaround for standalone backslash
+
+ def u(s):
+ return unicode(s.replace(r"\\", r"\\\\"), "unicode_escape")
+
+ unichr = unichr
+ int2byte = chr
+
+ def byte2int(bs):
+ return ord(bs[0])
+
+ def indexbytes(buf, i):
+ return ord(buf[i])
+
+ iterbytes = functools.partial(itertools.imap, ord)
+ import StringIO
+
+ StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+def assertNotRegex(self, *args, **kwargs):
+ return getattr(self, _assertNotRegex)(*args, **kwargs)
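+# Illustrative sketch (the TestCase subclass is hypothetical): each wrapper
+# forwards to whichever unittest method name exists on the running Python.
+# class ExampleTest(unittest.TestCase):
+#     def test_multiset(self):
+#         assertCountEqual(self, [1, 2, 2], [2, 1, 2])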
+
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+ def reraise(tp, value, tb=None):
+ try:
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+ finally:
+ value = None
+ tb = None
+
+
+else:
+
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec ("""exec _code_ in _globs_, _locs_""")
+
+ exec_(
+ """def reraise(tp, value, tb=None):
+ try:
+ raise tp, value, tb
+ finally:
+ tb = None
+"""
+ )
+
+
+if sys.version_info[:2] > (3,):
+ exec_(
+ """def raise_from(value, from_value):
+ try:
+ raise value from from_value
+ finally:
+ value = None
+"""
+ )
+else:
+
+ def raise_from(value, from_value):
+ raise value
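+# Illustrative example: reraise() re-raises a captured exception with its
+# original traceback, while raise_from() chains a cause where supported.
+# >>> try:
+# ...     1 / 0
+# ... except ZeroDivisionError:
+# ...     reraise(*sys.exc_info())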
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (
+ isinstance(fp, file)
+ and isinstance(data, unicode)
+ and fp.encoding is not None
+ ):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+
+
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
+
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+ # This does exactly what the :func:`py3:functools.update_wrapper`
+ # function does on Python versions after 3.2. It sets the ``__wrapped__``
+ # attribute on the ``wrapper`` object and it doesn't raise an error if
+ # any of the attributes mentioned in ``assigned`` and ``updated`` are
+ # missing on the ``wrapped`` object.
+ def _update_wrapper(
+ wrapper,
+ wrapped,
+ assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES,
+ ):
+ for attr in assigned:
+ try:
+ value = getattr(wrapped, attr)
+ except AttributeError:
+ continue
+ else:
+ setattr(wrapper, attr, value)
+ for attr in updated:
+ getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
+ wrapper.__wrapped__ = wrapped
+ return wrapper
+
+ _update_wrapper.__doc__ = functools.update_wrapper.__doc__
+
+ def wraps(
+ wrapped,
+ assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES,
+ ):
+ return functools.partial(
+ _update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated
+ )
+
+ wraps.__doc__ = functools.wraps.__doc__
+
+else:
+ wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(type):
+ def __new__(cls, name, this_bases, d):
+ if sys.version_info[:2] >= (3, 7):
+ # This version introduced PEP 560 that requires a bit
+ # of extra care (we mimic what is done by __build_class__).
+ resolved_bases = types.resolve_bases(bases)
+ if resolved_bases is not bases:
+ d["__orig_bases__"] = bases
+ else:
+ resolved_bases = bases
+ return meta(name, resolved_bases, d)
+
+ @classmethod
+ def __prepare__(cls, name, this_bases):
+ return meta.__prepare__(name, bases)
+
+ return type.__new__(metaclass, "temporary_class", (), {})
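+# Illustrative sketch (``Meta`` is hypothetical): the temporary class is
+# replaced by a real ``Meta`` instance at class-creation time.
+# class Meta(type):
+#     pass
+# class MyClass(with_metaclass(Meta, object)):
+#     pass
+# assert type(MyClass) is Meta  # holds on Python 2 and 3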
+
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get("__slots__")
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ orig_vars.pop("__dict__", None)
+ orig_vars.pop("__weakref__", None)
+ if hasattr(cls, "__qualname__"):
+ orig_vars["__qualname__"] = cls.__qualname__
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+
+ return wrapper
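+# Illustrative sketch (``Meta`` is hypothetical): the decorator form rebuilds
+# the class through the metaclass, preserving the __slots__ handling above.
+# @add_metaclass(Meta)
+# class MyOtherClass(object):
+#     pass
+# This is equivalent to ``class MyOtherClass(object, metaclass=Meta)`` on
+# Python 3.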
+
+
+def ensure_binary(s, encoding="utf-8", errors="strict"):
+ """Coerce **s** to six.binary_type.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> encoded to `bytes`
+ - `bytes` -> `bytes`
+ """
+ if isinstance(s, binary_type):
+ return s
+ if isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding="utf-8", errors="strict"):
+ """Coerce *s* to `str`.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ # Optimization: Fast return for the common case.
+ if type(s) is str:
+ return s
+ if PY2 and isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ elif PY3 and isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
+ return s
+
+
+def ensure_text(s, encoding="utf-8", errors="strict"):
+ """Coerce *s* to six.text_type.
+
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def python_2_unicode_compatible(klass):
+ """
+ A class decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if "__str__" not in klass.__dict__:
+ raise ValueError(
+ "@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." % klass.__name__
+ )
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode("utf-8")
+ return klass
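+# Illustrative sketch (``Greeting`` is hypothetical):
+# @python_2_unicode_compatible
+# class Greeting(object):
+#     def __str__(self):
+#         return u"héllo"
+# On Python 2 the decorator installs __unicode__ and a UTF-8-encoding
+# __str__; on Python 3 the class is returned unchanged.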
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+ # inserted an importer with different class.
+ if (
+ type(importer).__name__ == "_SixMetaPathImporter"
+ and importer.name == __name__
+ ):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/openpype/hosts/fusion/vendor/urllib3/packages/ssl_match_hostname/__init__.py b/openpype/hosts/fusion/vendor/urllib3/packages/ssl_match_hostname/__init__.py
new file mode 100644
index 0000000000..ef3fde5206
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/packages/ssl_match_hostname/__init__.py
@@ -0,0 +1,24 @@
+import sys
+
+try:
+ # Our match_hostname function is the same as 3.10's, so we only want to
+ # import the match_hostname function if it's at least that good.
+ # We also fallback on Python 3.10+ because our code doesn't emit
+ # deprecation warnings and is the same as Python 3.10 otherwise.
+ if sys.version_info < (3, 5) or sys.version_info >= (3, 10):
+ raise ImportError("Fallback to vendored code")
+
+ from ssl import CertificateError, match_hostname
+except ImportError:
+ try:
+ # Backport of the function from a pypi module
+ from backports.ssl_match_hostname import ( # type: ignore
+ CertificateError,
+ match_hostname,
+ )
+ except ImportError:
+ # Our vendored copy
+ from ._implementation import CertificateError, match_hostname # type: ignore
+
+# Not needed, but documenting what we provide.
+__all__ = ("CertificateError", "match_hostname")
diff --git a/openpype/hosts/fusion/vendor/urllib3/packages/ssl_match_hostname/_implementation.py b/openpype/hosts/fusion/vendor/urllib3/packages/ssl_match_hostname/_implementation.py
new file mode 100644
index 0000000000..689208d3c6
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/packages/ssl_match_hostname/_implementation.py
@@ -0,0 +1,160 @@
+"""The match_hostname() function from Python 3.3.3, essential when using SSL."""
+
+# Note: This file is under the PSF license as the code comes from the python
+# stdlib. http://docs.python.org/3/license.html
+
+import re
+import sys
+
+# The ipaddress module has been backported to 2.6+ on PyPI. If it is
+# installed on the system, use it to handle IPAddress subjectAltNames (this
+# was added in Python 3.5); otherwise, only do DNS matching. This allows
+# backports.ssl_match_hostname to continue to be used in Python 2.7.
+try:
+ import ipaddress
+except ImportError:
+ ipaddress = None
+
+__version__ = "3.5.0.1"
+
+
+class CertificateError(ValueError):
+ pass
+
+
+def _dnsname_match(dn, hostname, max_wildcards=1):
+ """Matching according to RFC 6125, section 6.4.3
+
+ http://tools.ietf.org/html/rfc6125#section-6.4.3
+ """
+ pats = []
+ if not dn:
+ return False
+
+ # Ported from python3-syntax:
+ # leftmost, *remainder = dn.split(r'.')
+ parts = dn.split(r".")
+ leftmost = parts[0]
+ remainder = parts[1:]
+
+ wildcards = leftmost.count("*")
+ if wildcards > max_wildcards:
+ # Issue #17980: avoid denials of service by refusing more
+ # than one wildcard per fragment. A survey of established
+ # policy among SSL implementations showed it to be a
+ # reasonable choice.
+ raise CertificateError(
+ "too many wildcards in certificate DNS name: " + repr(dn)
+ )
+
+ # speed up common case w/o wildcards
+ if not wildcards:
+ return dn.lower() == hostname.lower()
+
+ # RFC 6125, section 6.4.3, subitem 1.
+ # The client SHOULD NOT attempt to match a presented identifier in which
+ # the wildcard character comprises a label other than the left-most label.
+ if leftmost == "*":
+ # When '*' is a fragment by itself, it matches a non-empty dotless
+ # fragment.
+ pats.append("[^.]+")
+ elif leftmost.startswith("xn--") or hostname.startswith("xn--"):
+ # RFC 6125, section 6.4.3, subitem 3.
+ # The client SHOULD NOT attempt to match a presented identifier
+ # where the wildcard character is embedded within an A-label or
+ # U-label of an internationalized domain name.
+ pats.append(re.escape(leftmost))
+ else:
+ # Otherwise, '*' matches any dotless string, e.g. www*
+ pats.append(re.escape(leftmost).replace(r"\*", "[^.]*"))
+
+ # add the remaining fragments, ignore any wildcards
+ for frag in remainder:
+ pats.append(re.escape(frag))
+
+ pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE)
+ return pat.match(hostname)
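+# Illustrative behavior of the wildcard rules above:
+# >>> bool(_dnsname_match("*.example.com", "www.example.com"))
+# True
+# >>> bool(_dnsname_match("*.example.com", "a.b.example.com"))  # one label only
+# False
+# >>> bool(_dnsname_match("example.com", "EXAMPLE.COM"))  # case-insensitive
+# True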
+
+
+def _to_unicode(obj):
+ if isinstance(obj, str) and sys.version_info < (3,):
+ obj = unicode(obj, encoding="ascii", errors="strict")
+ return obj
+
+
+def _ipaddress_match(ipname, host_ip):
+ """Exact matching of IP addresses.
+
+ RFC 6125 explicitly doesn't define an algorithm for this
+ (section 1.7.2 - "Out of Scope").
+ """
+ # OpenSSL may add a trailing newline to a subjectAltName's IP address
+ # Divergence from upstream: ipaddress can't handle byte str
+ ip = ipaddress.ip_address(_to_unicode(ipname).rstrip())
+ return ip == host_ip
+
+
+def match_hostname(cert, hostname):
+ """Verify that *cert* (in decoded format as returned by
+ SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
+ rules are followed, but IP addresses are not accepted for *hostname*.
+
+ CertificateError is raised on failure. On success, the function
+ returns nothing.
+ """
+ if not cert:
+ raise ValueError(
+ "empty or no certificate, match_hostname needs a "
+ "SSL socket or SSL context with either "
+ "CERT_OPTIONAL or CERT_REQUIRED"
+ )
+ try:
+ # Divergence from upstream: ipaddress can't handle byte str
+ host_ip = ipaddress.ip_address(_to_unicode(hostname))
+ except ValueError:
+ # Not an IP address (common case)
+ host_ip = None
+ except UnicodeError:
+ # Divergence from upstream: Have to deal with ipaddress not taking
+ # byte strings. addresses should be all ascii, so we consider it not
+ # an ipaddress in this case
+ host_ip = None
+ except AttributeError:
+ # Divergence from upstream: Make ipaddress library optional
+ if ipaddress is None:
+ host_ip = None
+ else:
+ raise
+ dnsnames = []
+ san = cert.get("subjectAltName", ())
+ for key, value in san:
+ if key == "DNS":
+ if host_ip is None and _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ elif key == "IP Address":
+ if host_ip is not None and _ipaddress_match(value, host_ip):
+ return
+ dnsnames.append(value)
+ if not dnsnames:
+ # The subject is only checked when there is no dNSName entry
+ # in subjectAltName
+ for sub in cert.get("subject", ()):
+ for key, value in sub:
+ # XXX according to RFC 2818, the most specific Common Name
+ # must be used.
+ if key == "commonName":
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if len(dnsnames) > 1:
+ raise CertificateError(
+ "hostname %r "
+ "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames)))
+ )
+ elif len(dnsnames) == 1:
+ raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
+ else:
+ raise CertificateError(
+ "no appropriate commonName or subjectAltName fields were found"
+ )
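+# Illustrative usage (the cert dict mirrors ssl.SSLSocket.getpeercert()):
+# >>> cert = {"subjectAltName": (("DNS", "*.example.com"),)}
+# >>> match_hostname(cert, "www.example.com")  # returns None on success
+# >>> match_hostname(cert, "other.org")        # raises CertificateError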
diff --git a/openpype/hosts/fusion/vendor/urllib3/poolmanager.py b/openpype/hosts/fusion/vendor/urllib3/poolmanager.py
new file mode 100644
index 0000000000..3a31a285bf
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/poolmanager.py
@@ -0,0 +1,536 @@
+from __future__ import absolute_import
+
+import collections
+import functools
+import logging
+
+from ._collections import RecentlyUsedContainer
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
+from .exceptions import (
+ LocationValueError,
+ MaxRetryError,
+ ProxySchemeUnknown,
+ ProxySchemeUnsupported,
+ URLSchemeUnknown,
+)
+from .packages import six
+from .packages.six.moves.urllib.parse import urljoin
+from .request import RequestMethods
+from .util.proxy import connection_requires_http_tunnel
+from .util.retry import Retry
+from .util.url import parse_url
+
+__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
+
+
+log = logging.getLogger(__name__)
+
+SSL_KEYWORDS = (
+ "key_file",
+ "cert_file",
+ "cert_reqs",
+ "ca_certs",
+ "ssl_version",
+ "ca_cert_dir",
+ "ssl_context",
+ "key_password",
+)
+
+# All known keyword arguments that could be provided to the pool manager, its
+# pools, or the underlying connections. This is used to construct a pool key.
+_key_fields = (
+ "key_scheme", # str
+ "key_host", # str
+ "key_port", # int
+ "key_timeout", # int or float or Timeout
+ "key_retries", # int or Retry
+ "key_strict", # bool
+ "key_block", # bool
+ "key_source_address", # str
+ "key_key_file", # str
+ "key_key_password", # str
+ "key_cert_file", # str
+ "key_cert_reqs", # str
+ "key_ca_certs", # str
+ "key_ssl_version", # str
+ "key_ca_cert_dir", # str
+ "key_ssl_context", # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext
+ "key_maxsize", # int
+ "key_headers", # dict
+ "key__proxy", # parsed proxy url
+ "key__proxy_headers", # dict
+ "key__proxy_config", # class
+ "key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples
+ "key__socks_options", # dict
+ "key_assert_hostname", # bool or string
+ "key_assert_fingerprint", # str
+ "key_server_hostname", # str
+)
+
+#: The namedtuple class used to construct keys for the connection pool.
+#: All custom key schemes should include the fields in this key at a minimum.
+PoolKey = collections.namedtuple("PoolKey", _key_fields)
+
+_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
+ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
+
+
+def _default_key_normalizer(key_class, request_context):
+ """
+ Create a pool key out of a request context dictionary.
+
+ According to RFC 3986, both the scheme and host are case-insensitive.
+ Therefore, this function normalizes both before constructing the pool
+ key for a request. If you wish to change this behaviour, provide
+ alternate callables to ``key_fn_by_scheme``.
+
+ :param key_class:
+ The class to use when constructing the key. This should be a namedtuple
+ with the ``scheme`` and ``host`` keys at a minimum.
+ :type key_class: namedtuple
+ :param request_context:
+ A dictionary-like object that contains the context for a request.
+ :type request_context: dict
+
+ :return: A namedtuple that can be used as a connection pool key.
+ :rtype: PoolKey
+ """
+ # Since we mutate the dictionary, make a copy first
+ context = request_context.copy()
+ context["scheme"] = context["scheme"].lower()
+ context["host"] = context["host"].lower()
+
+ # These are both dictionaries and need to be transformed into frozensets
+ for key in ("headers", "_proxy_headers", "_socks_options"):
+ if key in context and context[key] is not None:
+ context[key] = frozenset(context[key].items())
+
+ # The socket_options key may be a list and needs to be transformed into a
+ # tuple.
+ socket_opts = context.get("socket_options")
+ if socket_opts is not None:
+ context["socket_options"] = tuple(socket_opts)
+
+ # Map the kwargs to the names in the namedtuple - this is necessary since
+ # namedtuples can't have fields starting with '_'.
+ for key in list(context.keys()):
+ context["key_" + key] = context.pop(key)
+
+ # Default to ``None`` for keys missing from the context
+ for field in key_class._fields:
+ if field not in context:
+ context[field] = None
+
+ return key_class(**context)
+
+
+#: A dictionary that maps a scheme to a callable that creates a pool key.
+#: This can be used to alter the way pool keys are constructed, if desired.
+#: Each PoolManager makes a copy of this dictionary so they can be configured
+#: globally here, or individually on the instance.
+key_fn_by_scheme = {
+ "http": functools.partial(_default_key_normalizer, PoolKey),
+ "https": functools.partial(_default_key_normalizer, PoolKey),
+}
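+# Illustrative example: scheme and host are normalized to lowercase and
+# prefixed with ``key_``, so differently cased contexts map to the same pool.
+# >>> ctx = {"scheme": "HTTPS", "host": "Example.com", "port": 443}
+# >>> key_fn_by_scheme["https"](ctx) == key_fn_by_scheme["https"](
+# ...     {"scheme": "https", "host": "example.com", "port": 443})
+# True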
+
+pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool}
+
+
+class PoolManager(RequestMethods):
+ """
+ Allows for arbitrary requests while transparently keeping track of
+ necessary connection pools for you.
+
+ :param num_pools:
+ Number of connection pools to cache before discarding the least
+ recently used pool.
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+
+ :param \\**connection_pool_kw:
+ Additional parameters are used to create fresh
+ :class:`urllib3.connectionpool.ConnectionPool` instances.
+
+ Example::
+
+ >>> manager = PoolManager(num_pools=2)
+ >>> r = manager.request('GET', 'http://google.com/')
+ >>> r = manager.request('GET', 'http://google.com/mail')
+ >>> r = manager.request('GET', 'http://yahoo.com/')
+ >>> len(manager.pools)
+ 2
+
+ """
+
+ proxy = None
+ proxy_config = None
+
+ def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
+ RequestMethods.__init__(self, headers)
+ self.connection_pool_kw = connection_pool_kw
+ self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close())
+
+ # Locally set the pool classes and keys so other PoolManagers can
+ # override them.
+ self.pool_classes_by_scheme = pool_classes_by_scheme
+ self.key_fn_by_scheme = key_fn_by_scheme.copy()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.clear()
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def _new_pool(self, scheme, host, port, request_context=None):
+ """
+ Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
+ any additional pool keyword arguments.
+
+ If ``request_context`` is provided, it is provided as keyword arguments
+ to the pool class used. This method is used to actually create the
+ connection pools handed out by :meth:`connection_from_url` and
+ companion methods. It is intended to be overridden for customization.
+ """
+ pool_cls = self.pool_classes_by_scheme[scheme]
+ if request_context is None:
+ request_context = self.connection_pool_kw.copy()
+
+ # Although the context has everything necessary to create the pool,
+ # this function has historically only used the scheme, host, and port
+ # in the positional args. When an API change is acceptable these can
+ # be removed.
+ for key in ("scheme", "host", "port"):
+ request_context.pop(key, None)
+
+ if scheme == "http":
+ for kw in SSL_KEYWORDS:
+ request_context.pop(kw, None)
+
+ return pool_cls(host, port, **request_context)
+
+ def clear(self):
+ """
+ Empty our store of pools and direct them all to close.
+
+ This will not affect in-flight connections, but they will not be
+ re-used after completion.
+ """
+ self.pools.clear()
+
+ def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
+ """
+ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
+
+ If ``port`` isn't given, it will be derived from the ``scheme`` using
+ ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
+ provided, it is merged with the instance's ``connection_pool_kw``
+ variable and used to create the new connection pool, if one is
+ needed.
+ """
+
+ if not host:
+ raise LocationValueError("No host specified.")
+
+ request_context = self._merge_pool_kwargs(pool_kwargs)
+ request_context["scheme"] = scheme or "http"
+ if not port:
+ port = port_by_scheme.get(request_context["scheme"].lower(), 80)
+ request_context["port"] = port
+ request_context["host"] = host
+
+ return self.connection_from_context(request_context)
+
+ def connection_from_context(self, request_context):
+ """
+ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
+
+ ``request_context`` must at least contain the ``scheme`` key and its
+ value must be a key in ``key_fn_by_scheme`` instance variable.
+ """
+ scheme = request_context["scheme"].lower()
+ pool_key_constructor = self.key_fn_by_scheme.get(scheme)
+ if not pool_key_constructor:
+ raise URLSchemeUnknown(scheme)
+ pool_key = pool_key_constructor(request_context)
+
+ return self.connection_from_pool_key(pool_key, request_context=request_context)
+
+ def connection_from_pool_key(self, pool_key, request_context=None):
+ """
+ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
+
+ ``pool_key`` should be a namedtuple that only contains immutable
+ objects. At a minimum it must have the ``scheme``, ``host``, and
+ ``port`` fields.
+ """
+ with self.pools.lock:
+ # If the scheme, host, or port doesn't match existing open
+ # connections, open a new ConnectionPool.
+ pool = self.pools.get(pool_key)
+ if pool:
+ return pool
+
+ # Make a fresh ConnectionPool of the desired type
+ scheme = request_context["scheme"]
+ host = request_context["host"]
+ port = request_context["port"]
+ pool = self._new_pool(scheme, host, port, request_context=request_context)
+ self.pools[pool_key] = pool
+
+ return pool
+
+ def connection_from_url(self, url, pool_kwargs=None):
+ """
+ Similar to :func:`urllib3.connectionpool.connection_from_url`.
+
+ If ``pool_kwargs`` is not provided and a new pool needs to be
+ constructed, ``self.connection_pool_kw`` is used to initialize
+ the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
+ is provided, it is used instead. Note that if a new pool does not
+ need to be created for the request, the provided ``pool_kwargs`` are
+ not used.
+ """
+ u = parse_url(url)
+ return self.connection_from_host(
+ u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
+ )
+
+ def _merge_pool_kwargs(self, override):
+ """
+ Merge a dictionary of override values for self.connection_pool_kw.
+
+ This does not modify self.connection_pool_kw and returns a new dict.
+ Any keys in the override dictionary with a value of ``None`` are
+ removed from the merged dictionary.
+ """
+ base_pool_kwargs = self.connection_pool_kw.copy()
+ if override:
+ for key, value in override.items():
+ if value is None:
+ try:
+ del base_pool_kwargs[key]
+ except KeyError:
+ pass
+ else:
+ base_pool_kwargs[key] = value
+ return base_pool_kwargs
+
+ def _proxy_requires_url_absolute_form(self, parsed_url):
+ """
+ Indicates if the proxy requires the complete destination URL in the
+ request. Normally this is only needed when not using an HTTP CONNECT
+ tunnel.
+ """
+ if self.proxy is None:
+ return False
+
+ return not connection_requires_http_tunnel(
+ self.proxy, self.proxy_config, parsed_url.scheme
+ )
+
+ def _validate_proxy_scheme_url_selection(self, url_scheme):
+ """
+ Validates that we're not attempting to do TLS-in-TLS connections on
+ Python 2 or with unsupported SSL implementations.
+ """
+ if self.proxy is None or url_scheme != "https":
+ return
+
+ if self.proxy.scheme != "https":
+ return
+
+ if six.PY2 and not self.proxy_config.use_forwarding_for_https:
+ raise ProxySchemeUnsupported(
+ "Contacting HTTPS destinations through HTTPS proxies "
+ "'via CONNECT tunnels' is not supported in Python 2"
+ )
+
+ def urlopen(self, method, url, redirect=True, **kw):
+ """
+ Same as :meth:`urllib3.HTTPConnectionPool.urlopen`, but with custom
+ cross-host redirect logic; only the request-uri portion of the ``url``
+ is sent.
+
+ The given ``url`` parameter must be absolute, such that an appropriate
+ :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
+ """
+ u = parse_url(url)
+ self._validate_proxy_scheme_url_selection(u.scheme)
+
+ conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
+
+ kw["assert_same_host"] = False
+ kw["redirect"] = False
+
+ if "headers" not in kw:
+ kw["headers"] = self.headers.copy()
+
+ if self._proxy_requires_url_absolute_form(u):
+ response = conn.urlopen(method, url, **kw)
+ else:
+ response = conn.urlopen(method, u.request_uri, **kw)
+
+ redirect_location = redirect and response.get_redirect_location()
+ if not redirect_location:
+ return response
+
+ # Support relative URLs for redirecting.
+ redirect_location = urljoin(url, redirect_location)
+
+ # RFC 7231, Section 6.4.4
+ if response.status == 303:
+ method = "GET"
+
+ retries = kw.get("retries")
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect)
+
+ # Strip headers marked as unsafe to forward to the redirected location.
+ # Check remove_headers_on_redirect to avoid a potential network call within
+ # conn.is_same_host() which may use socket.gethostbyname() in the future.
+ if retries.remove_headers_on_redirect and not conn.is_same_host(
+ redirect_location
+ ):
+ headers = list(six.iterkeys(kw["headers"]))
+ for header in headers:
+ if header.lower() in retries.remove_headers_on_redirect:
+ kw["headers"].pop(header, None)
+
+ try:
+ retries = retries.increment(method, url, response=response, _pool=conn)
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ response.drain_conn()
+ raise
+ return response
+
+ kw["retries"] = retries
+ kw["redirect"] = redirect
+
+ log.info("Redirecting %s -> %s", url, redirect_location)
+
+ response.drain_conn()
+ return self.urlopen(method, redirect_location, **kw)
+
+
+class ProxyManager(PoolManager):
+ """
+ Behaves just like :class:`PoolManager`, but sends all requests through
+ the defined proxy, using the CONNECT method for HTTPS URLs.
+
+ :param proxy_url:
+ The URL of the proxy to be used.
+
+ :param proxy_headers:
+ A dictionary containing headers that will be sent to the proxy. For
+ HTTP requests they are sent with each request, while for HTTPS/CONNECT
+ they are sent only once. They can be used for proxy authentication.
+
+ :param proxy_ssl_context:
+ The proxy SSL context is used to establish the TLS connection to the
+ proxy when using HTTPS proxies.
+
+ :param use_forwarding_for_https:
+ (Defaults to False) If set to True will forward requests to the HTTPS
+ proxy to be made on behalf of the client instead of creating a TLS
+ tunnel via the CONNECT method. **Enabling this flag means that request
+ and response headers and content will be visible from the HTTPS proxy**
+ whereas tunneling keeps request and response headers and content
+ private. IP address, target hostname, SNI, and port are always visible
+ to an HTTPS proxy even when this flag is disabled.
+
+ Example::
+
+ >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
+ >>> r1 = proxy.request('GET', 'http://google.com/')
+ >>> r2 = proxy.request('GET', 'http://httpbin.org/')
+ >>> len(proxy.pools)
+ 1
+ >>> r3 = proxy.request('GET', 'https://httpbin.org/')
+ >>> r4 = proxy.request('GET', 'https://twitter.com/')
+ >>> len(proxy.pools)
+ 3
+
+ """
+
+ def __init__(
+ self,
+ proxy_url,
+ num_pools=10,
+ headers=None,
+ proxy_headers=None,
+ proxy_ssl_context=None,
+ use_forwarding_for_https=False,
+ **connection_pool_kw
+ ):
+
+ if isinstance(proxy_url, HTTPConnectionPool):
+ proxy_url = "%s://%s:%i" % (
+ proxy_url.scheme,
+ proxy_url.host,
+ proxy_url.port,
+ )
+ proxy = parse_url(proxy_url)
+
+ if proxy.scheme not in ("http", "https"):
+ raise ProxySchemeUnknown(proxy.scheme)
+
+ if not proxy.port:
+ port = port_by_scheme.get(proxy.scheme, 80)
+ proxy = proxy._replace(port=port)
+
+ self.proxy = proxy
+ self.proxy_headers = proxy_headers or {}
+ self.proxy_ssl_context = proxy_ssl_context
+ self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)
+
+ connection_pool_kw["_proxy"] = self.proxy
+ connection_pool_kw["_proxy_headers"] = self.proxy_headers
+ connection_pool_kw["_proxy_config"] = self.proxy_config
+
+ super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)
+
+ def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
+ if scheme == "https":
+ return super(ProxyManager, self).connection_from_host(
+ host, port, scheme, pool_kwargs=pool_kwargs
+ )
+
+ return super(ProxyManager, self).connection_from_host(
+ self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs
+ )
+
+ def _set_proxy_headers(self, url, headers=None):
+ """
+ Sets headers needed by proxies: specifically, the Accept and Host
+ headers. Only sets headers not provided by the user.
+ """
+ headers_ = {"Accept": "*/*"}
+
+ netloc = parse_url(url).netloc
+ if netloc:
+ headers_["Host"] = netloc
+
+ if headers:
+ headers_.update(headers)
+ return headers_
+
+ def urlopen(self, method, url, redirect=True, **kw):
+ "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
+ u = parse_url(url)
+ if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
+ # For connections using HTTP CONNECT, httplib sets the necessary
+ # headers on the CONNECT to the proxy. If we're not using CONNECT,
+ # we'll definitely need to set 'Host' at the very least.
+ headers = kw.get("headers", self.headers)
+ kw["headers"] = self._set_proxy_headers(url, headers)
+
+ return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
+
+
+def proxy_from_url(url, **kw):
+ return ProxyManager(proxy_url=url, **kw)
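+# Illustrative usage sketch (the proxy address is hypothetical):
+# >>> http = proxy_from_url("http://localhost:3128/", num_pools=4)
+# >>> r = http.request("GET", "http://example.com/")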
diff --git a/openpype/hosts/fusion/vendor/urllib3/request.py b/openpype/hosts/fusion/vendor/urllib3/request.py
new file mode 100644
index 0000000000..398386a5b9
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/request.py
@@ -0,0 +1,170 @@
+from __future__ import absolute_import
+
+from .filepost import encode_multipart_formdata
+from .packages.six.moves.urllib.parse import urlencode
+
+__all__ = ["RequestMethods"]
+
+
+class RequestMethods(object):
+ """
+ Convenience mixin for classes that implement a :meth:`urlopen` method, such
+ as :class:`urllib3.HTTPConnectionPool` and
+ :class:`urllib3.PoolManager`.
+
+ Provides behavior for making common types of HTTP requests and decides
+ which type of request field encoding to use.
+
+ Specifically,
+
+ :meth:`.request_encode_url` is for sending requests whose fields are
+ encoded in the URL (such as GET, HEAD, DELETE).
+
+ :meth:`.request_encode_body` is for sending requests whose fields are
+ encoded in the *body* of the request using multipart or www-form-urlencoded
+ (such as for POST, PUT, PATCH).
+
+ :meth:`.request` is for making any kind of request; it will look up the
+ appropriate encoding format and use one of the above two methods to make
+ the request.
+
+ Initializer parameters:
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+ """
+
+ _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"}
+
+ def __init__(self, headers=None):
+ self.headers = headers or {}
+
+ def urlopen(
+ self,
+ method,
+ url,
+ body=None,
+ headers=None,
+ encode_multipart=True,
+ multipart_boundary=None,
+ **kw
+ ): # Abstract
+ raise NotImplementedError(
+ "Classes extending RequestMethods must implement "
+ "their own ``urlopen`` method."
+ )
+
+ def request(self, method, url, fields=None, headers=None, **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the appropriate encoding of
+ ``fields`` based on the ``method`` used.
+
+ This is a convenience method that requires the least amount of manual
+ effort. It can be used in most situations, while still having the
+ option to drop down to more specific methods when necessary, such as
+ :meth:`request_encode_url`, :meth:`request_encode_body`,
+ or even the lowest level :meth:`urlopen`.
+ """
+ method = method.upper()
+
+ urlopen_kw["request_url"] = url
+
+ if method in self._encode_url_methods:
+ return self.request_encode_url(
+ method, url, fields=fields, headers=headers, **urlopen_kw
+ )
+ else:
+ return self.request_encode_body(
+ method, url, fields=fields, headers=headers, **urlopen_kw
+ )
+
+ def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the url. This is useful for request methods like GET, HEAD, DELETE, etc.
+ """
+ if headers is None:
+ headers = self.headers
+
+ extra_kw = {"headers": headers}
+ extra_kw.update(urlopen_kw)
+
+ if fields:
+ url += "?" + urlencode(fields)
+
+ return self.urlopen(method, url, **extra_kw)
+
+ def request_encode_body(
+ self,
+ method,
+ url,
+ fields=None,
+ headers=None,
+ encode_multipart=True,
+ multipart_boundary=None,
+ **urlopen_kw
+ ):
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the body. This is useful for request methods like POST, PUT, PATCH, etc.
+
+ When ``encode_multipart=True`` (default), then
+ :func:`urllib3.encode_multipart_formdata` is used to encode
+ the payload with the appropriate content type. Otherwise
+ :func:`urllib.parse.urlencode` is used with the
+ 'application/x-www-form-urlencoded' content type.
+
+ Multipart encoding must be used when posting files, and it's reasonably
+ safe to use it at other times too. However, it may break request
+ signing, such as with OAuth.
+
+ Supports an optional ``fields`` parameter of key/value strings AND
+ key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
+ the MIME type is optional. For example::
+
+ fields = {
+ 'foo': 'bar',
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
+ 'realfile': ('barfile.txt', open('realfile').read()),
+ 'typedfile': ('bazfile.bin', open('bazfile').read(),
+ 'image/jpeg'),
+ 'nonamefile': 'contents of nonamefile field',
+ }
+
+ When uploading a file, providing a filename (the first parameter of the
+ tuple) is optional but recommended to best mimic the behavior of browsers.
+
+ Note that if ``headers`` are supplied, the 'Content-Type' header will
+ be overwritten because it depends on the dynamic random boundary string
+ which is used to compose the body of the request. The random boundary
+ string can be explicitly set with the ``multipart_boundary`` parameter.
+ """
+ if headers is None:
+ headers = self.headers
+
+ extra_kw = {"headers": {}}
+
+ if fields:
+ if "body" in urlopen_kw:
+ raise TypeError(
+ "request got values for both 'fields' and 'body', can only specify one."
+ )
+
+ if encode_multipart:
+ body, content_type = encode_multipart_formdata(
+ fields, boundary=multipart_boundary
+ )
+ else:
+ body, content_type = (
+ urlencode(fields),
+ "application/x-www-form-urlencoded",
+ )
+
+ extra_kw["body"] = body
+ extra_kw["headers"] = {"Content-Type": content_type}
+
+ extra_kw["headers"].update(headers)
+ extra_kw.update(urlopen_kw)
+
+ return self.urlopen(method, url, **extra_kw)
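+# Illustrative usage through a concrete subclass such as PoolManager:
+# >>> http = urllib3.PoolManager()
+# >>> r = http.request("GET", "http://example.com/", fields={"q": "urllib3"})
+# A GET encodes ``fields`` into the query string; the same call with POST
+# would send a multipart/form-data body (pass ``encode_multipart=False``
+# for application/x-www-form-urlencoded).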
diff --git a/openpype/hosts/fusion/vendor/urllib3/response.py b/openpype/hosts/fusion/vendor/urllib3/response.py
new file mode 100644
index 0000000000..38693f4fc6
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/response.py
@@ -0,0 +1,821 @@
+from __future__ import absolute_import
+
+import io
+import logging
+import zlib
+from contextlib import contextmanager
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
+
+try:
+ import brotli
+except ImportError:
+ brotli = None
+
+from ._collections import HTTPHeaderDict
+from .connection import BaseSSLError, HTTPException
+from .exceptions import (
+ BodyNotHttplibCompatible,
+ DecodeError,
+ HTTPError,
+ IncompleteRead,
+ InvalidChunkLength,
+ InvalidHeader,
+ ProtocolError,
+ ReadTimeoutError,
+ ResponseNotChunked,
+ SSLError,
+)
+from .packages import six
+from .util.response import is_fp_closed, is_response_to_head
+
+log = logging.getLogger(__name__)
+
+
+class DeflateDecoder(object):
+ def __init__(self):
+ self._first_try = True
+ self._data = b""
+ self._obj = zlib.decompressobj()
+
+ def __getattr__(self, name):
+ return getattr(self._obj, name)
+
+ def decompress(self, data):
+ if not data:
+ return data
+
+ if not self._first_try:
+ return self._obj.decompress(data)
+
+ self._data += data
+ try:
+ decompressed = self._obj.decompress(data)
+ if decompressed:
+ self._first_try = False
+ self._data = None
+ return decompressed
+ except zlib.error:
+ self._first_try = False
+ self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
+ try:
+ return self.decompress(self._data)
+ finally:
+ self._data = None
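+
+ # Sketch of the fallback above: some servers send raw DEFLATE data without
+ # the zlib wrapper, so after the first zlib.error the buffered bytes are
+ # retried with a raw-deflate decompressor:
+ #
+ #     zlib.decompressobj()                 # RFC 1950, zlib-wrapped deflate
+ #     zlib.decompressobj(-zlib.MAX_WBITS)  # RFC 1951, raw deflate stream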
+
+
+class GzipDecoderState(object):
+
+ FIRST_MEMBER = 0
+ OTHER_MEMBERS = 1
+ SWALLOW_DATA = 2
+
+
+class GzipDecoder(object):
+ def __init__(self):
+ self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+ self._state = GzipDecoderState.FIRST_MEMBER
+
+ def __getattr__(self, name):
+ return getattr(self._obj, name)
+
+ def decompress(self, data):
+ ret = bytearray()
+ if self._state == GzipDecoderState.SWALLOW_DATA or not data:
+ return bytes(ret)
+ while True:
+ try:
+ ret += self._obj.decompress(data)
+ except zlib.error:
+ previous_state = self._state
+ # Ignore data after the first error
+ self._state = GzipDecoderState.SWALLOW_DATA
+ if previous_state == GzipDecoderState.OTHER_MEMBERS:
+ # Allow trailing garbage acceptable in other gzip clients
+ return bytes(ret)
+ raise
+ data = self._obj.unused_data
+ if not data:
+ return bytes(ret)
+ self._state = GzipDecoderState.OTHER_MEMBERS
+ self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+
+
+if brotli is not None:
+
+ class BrotliDecoder(object):
+ # Supports both 'brotlipy' and 'Brotli' packages
+ # since they share an import name. The top branches
+ # are for 'brotlipy' and bottom branches for 'Brotli'
+ def __init__(self):
+ self._obj = brotli.Decompressor()
+ if hasattr(self._obj, "decompress"):
+ self.decompress = self._obj.decompress
+ else:
+ self.decompress = self._obj.process
+
+ def flush(self):
+ if hasattr(self._obj, "flush"):
+ return self._obj.flush()
+ return b""
+
+
+class MultiDecoder(object):
+ """
+ From RFC7231:
+ If one or more encodings have been applied to a representation, the
+ sender that applied the encodings MUST generate a Content-Encoding
+ header field that lists the content codings in the order in which
+ they were applied.
+ """
+
+ def __init__(self, modes):
+ self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]
+
+ def flush(self):
+ return self._decoders[0].flush()
+
+ def decompress(self, data):
+ for d in reversed(self._decoders):
+ data = d.decompress(data)
+ return data
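+
+ # Sketch (assuming the brotli package is available): for
+ # "Content-Encoding: gzip, br" the decoders are listed in the order the
+ # codings were applied, so decoding runs in reverse:
+ #
+ #     MultiDecoder("gzip, br").decompress(data)
+ #     # == GzipDecoder().decompress(BrotliDecoder().decompress(data))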
+
+
+def _get_decoder(mode):
+ if "," in mode:
+ return MultiDecoder(mode)
+
+ if mode == "gzip":
+ return GzipDecoder()
+
+ if brotli is not None and mode == "br":
+ return BrotliDecoder()
+
+ return DeflateDecoder()
+
+
+class HTTPResponse(io.IOBase):
+ """
+ HTTP Response container.
+
+ Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
+ loaded and decoded on-demand when the ``data`` property is accessed. This
+ class is also compatible with the Python standard library's :mod:`io`
+ module, and can hence be treated as a readable object in the context of that
+ framework.
+
+ Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
+
+ :param preload_content:
+ If True, the response's body will be preloaded during construction.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+
+ :param original_response:
+ When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
+ object, it's convenient to include the original for debug purposes. It's
+ otherwise unused.
+
+ :param retries:
+ The retries contains the last :class:`~urllib3.util.retry.Retry` that
+ was used during the request.
+
+ :param enforce_content_length:
+ Enforce content length checking. Body returned by server must match
+ value of Content-Length header, if present. Otherwise, raise error.
+ """
+
+ CONTENT_DECODERS = ["gzip", "deflate"]
+ if brotli is not None:
+ CONTENT_DECODERS += ["br"]
+ REDIRECT_STATUSES = [301, 302, 303, 307, 308]
+
+ def __init__(
+ self,
+ body="",
+ headers=None,
+ status=0,
+ version=0,
+ reason=None,
+ strict=0,
+ preload_content=True,
+ decode_content=True,
+ original_response=None,
+ pool=None,
+ connection=None,
+ msg=None,
+ retries=None,
+ enforce_content_length=False,
+ request_method=None,
+ request_url=None,
+ auto_close=True,
+ ):
+
+ if isinstance(headers, HTTPHeaderDict):
+ self.headers = headers
+ else:
+ self.headers = HTTPHeaderDict(headers)
+ self.status = status
+ self.version = version
+ self.reason = reason
+ self.strict = strict
+ self.decode_content = decode_content
+ self.retries = retries
+ self.enforce_content_length = enforce_content_length
+ self.auto_close = auto_close
+
+ self._decoder = None
+ self._body = None
+ self._fp = None
+ self._original_response = original_response
+ self._fp_bytes_read = 0
+ self.msg = msg
+ self._request_url = request_url
+
+ if body and isinstance(body, (six.string_types, bytes)):
+ self._body = body
+
+ self._pool = pool
+ self._connection = connection
+
+ if hasattr(body, "read"):
+ self._fp = body
+
+ # Are we using the chunked-style of transfer encoding?
+ self.chunked = False
+ self.chunk_left = None
+ tr_enc = self.headers.get("transfer-encoding", "").lower()
+ # Don't incur the penalty of creating a list and then discarding it
+ encodings = (enc.strip() for enc in tr_enc.split(","))
+ if "chunked" in encodings:
+ self.chunked = True
+
+ # Determine length of response
+ self.length_remaining = self._init_length(request_method)
+
+ # If requested, preload the body.
+ if preload_content and not self._body:
+ self._body = self.read(decode_content=decode_content)
+
+ def get_redirect_location(self):
+ """
+ Should we redirect and where to?
+
+ :returns: Truthy redirect location string if we got a redirect status
+ code and valid location. ``None`` if redirect status and no
+ location. ``False`` if not a redirect status code.
+ """
+ if self.status in self.REDIRECT_STATUSES:
+ return self.headers.get("location")
+
+ return False
+
+ def release_conn(self):
+ if not self._pool or not self._connection:
+ return
+
+ self._pool._put_conn(self._connection)
+ self._connection = None
+
+ def drain_conn(self):
+ """
+ Read and discard any remaining HTTP response data in the response connection.
+
+ Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
+ """
+ try:
+ self.read()
+ except (HTTPError, SocketError, BaseSSLError, HTTPException):
+ pass
+
+ @property
+ def data(self):
+ # For backwards-compat with urllib3 0.4 and earlier.
+ if self._body:
+ return self._body
+
+ if self._fp:
+ return self.read(cache_content=True)
+
+ @property
+ def connection(self):
+ return self._connection
+
+ def isclosed(self):
+ return is_fp_closed(self._fp)
+
+ def tell(self):
+ """
+ Obtain the number of bytes pulled over the wire so far. May differ from
+ the amount of content returned by :meth:`urllib3.response.HTTPResponse.read`
+ if bytes are encoded on the wire (e.g., compressed).
+ """
+ return self._fp_bytes_read
+
+ def _init_length(self, request_method):
+ """
+ Set initial length value for Response content if available.
+ """
+ length = self.headers.get("content-length")
+
+ if length is not None:
+ if self.chunked:
+ # This Response will fail with an IncompleteRead if it can't be
+ # received as chunked. This method falls back to attempt reading
+ # the response before raising an exception.
+ log.warning(
+ "Received response with both Content-Length and "
+ "Transfer-Encoding set. This is expressly forbidden "
+ "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
+ "attempting to process response as Transfer-Encoding: "
+ "chunked."
+ )
+ return None
+
+ try:
+ # RFC 7230 section 3.3.2 specifies multiple content lengths can
+ # be sent in a single Content-Length header
+ # (e.g. Content-Length: 42, 42). This line ensures the values
+ # are all valid ints and that as long as the `set` length is 1,
+ # all values are the same. Otherwise, the header is invalid.
+ lengths = set([int(val) for val in length.split(",")])
+ if len(lengths) > 1:
+ raise InvalidHeader(
+ "Content-Length contained multiple "
+ "unmatching values (%s)" % length
+ )
+ length = lengths.pop()
+ except ValueError:
+ length = None
+ else:
+ if length < 0:
+ length = None
+
+ # Convert status to int for comparison
+ # In some cases, httplib returns a status of "_UNKNOWN"
+ try:
+ status = int(self.status)
+ except ValueError:
+ status = 0
+
+ # Check for responses that shouldn't include a body
+ if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
+ length = 0
+
+ return length
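+
+ # Sketch of the duplicate Content-Length rule above:
+ #
+ #     set(int(v) for v in "42, 42".split(","))  # {42}      -> length = 42
+ #     set(int(v) for v in "42, 43".split(","))  # {42, 43}  -> InvalidHeader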
+
+ def _init_decoder(self):
+ """
+ Set-up the _decoder attribute if necessary.
+ """
+ # Note: content-encoding value should be case-insensitive, per RFC 7230
+ # Section 3.2
+ content_encoding = self.headers.get("content-encoding", "").lower()
+ if self._decoder is None:
+ if content_encoding in self.CONTENT_DECODERS:
+ self._decoder = _get_decoder(content_encoding)
+ elif "," in content_encoding:
+ encodings = [
+ e.strip()
+ for e in content_encoding.split(",")
+ if e.strip() in self.CONTENT_DECODERS
+ ]
+ if len(encodings):
+ self._decoder = _get_decoder(content_encoding)
+
+ DECODER_ERROR_CLASSES = (IOError, zlib.error)
+ if brotli is not None:
+ DECODER_ERROR_CLASSES += (brotli.error,)
+
+ def _decode(self, data, decode_content, flush_decoder):
+ """
+ Decode the data passed in and potentially flush the decoder.
+ """
+ if not decode_content:
+ return data
+
+ try:
+ if self._decoder:
+ data = self._decoder.decompress(data)
+ except self.DECODER_ERROR_CLASSES as e:
+ content_encoding = self.headers.get("content-encoding", "").lower()
+ raise DecodeError(
+ "Received response with content-encoding: %s, but "
+ "failed to decode it." % content_encoding,
+ e,
+ )
+ if flush_decoder:
+ data += self._flush_decoder()
+
+ return data
+
+ def _flush_decoder(self):
+ """
+ Flushes the decoder. Should only be called if the decoder is actually
+ being used.
+ """
+ if self._decoder:
+ buf = self._decoder.decompress(b"")
+ return buf + self._decoder.flush()
+
+ return b""
+
+ @contextmanager
+ def _error_catcher(self):
+ """
+ Catch low-level python exceptions, instead re-raising urllib3
+ variants, so that low-level exceptions are not leaked in the
+ high-level api.
+
+ On exit, release the connection back to the pool.
+ """
+ clean_exit = False
+
+ try:
+ try:
+ yield
+
+ except SocketTimeout:
+ # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
+ # there is yet no clean way to get at it from this context.
+ raise ReadTimeoutError(self._pool, None, "Read timed out.")
+
+ except BaseSSLError as e:
+ # FIXME: Is there a better way to differentiate between SSLErrors?
+ if "read operation timed out" not in str(e):
+ # SSL errors related to framing/MAC get wrapped and reraised here
+ raise SSLError(e)
+
+ raise ReadTimeoutError(self._pool, None, "Read timed out.")
+
+ except (HTTPException, SocketError) as e:
+ # This includes IncompleteRead.
+ raise ProtocolError("Connection broken: %r" % e, e)
+
+ # If no exception is thrown, we should avoid cleaning up
+ # unnecessarily.
+ clean_exit = True
+ finally:
+ # If we didn't terminate cleanly, we need to throw away our
+ # connection.
+ if not clean_exit:
+ # The response may not be closed but we're not going to use it
+ # anymore so close it now to ensure that the connection is
+ # released back to the pool.
+ if self._original_response:
+ self._original_response.close()
+
+ # Closing the response may not actually be sufficient to close
+ # everything, so if we have a hold of the connection close that
+ # too.
+ if self._connection:
+ self._connection.close()
+
+ # If we hold the original response but it's closed now, we should
+ # return the connection back to the pool.
+ if self._original_response and self._original_response.isclosed():
+ self.release_conn()
+
+ def read(self, amt=None, decode_content=None, cache_content=False):
+ """
+ Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
+ parameters: ``decode_content`` and ``cache_content``.
+
+ :param amt:
+ How much of the content to read. If specified, caching is skipped
+ because it doesn't make sense to cache partial content as the full
+ response.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+
+ :param cache_content:
+ If True, will save the returned data such that the same result is
+ returned regardless of the state of the underlying file object. This
+ is useful if you want the ``.data`` property to continue working
+ after having ``.read()`` the file object. (Overridden if ``amt`` is
+ set.)
+ """
+ self._init_decoder()
+ if decode_content is None:
+ decode_content = self.decode_content
+
+ if self._fp is None:
+ return
+
+ flush_decoder = False
+ fp_closed = getattr(self._fp, "closed", False)
+
+ with self._error_catcher():
+ if amt is None:
+ # cStringIO doesn't like amt=None
+ data = self._fp.read() if not fp_closed else b""
+ flush_decoder = True
+ else:
+ cache_content = False
+ data = self._fp.read(amt) if not fp_closed else b""
+ if (
+ amt != 0 and not data
+ ): # Platform-specific: Buggy versions of Python.
+ # Close the connection when no data is returned
+ #
+ # This is redundant to what httplib/http.client _should_
+ # already do. However, versions of python released before
+ # December 15, 2012 (http://bugs.python.org/issue16298) do
+ # not properly close the connection in all cases. There is
+ # no harm in redundantly calling close.
+ self._fp.close()
+ flush_decoder = True
+ if self.enforce_content_length and self.length_remaining not in (
+ 0,
+ None,
+ ):
+ # This is an edge case that httplib failed to cover due
+ # to concerns of backward compatibility. We're
+ # addressing it here to make sure IncompleteRead is
+ # raised during streaming, so all calls with incorrect
+ # Content-Length are caught.
+ raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
+
+ if data:
+ self._fp_bytes_read += len(data)
+ if self.length_remaining is not None:
+ self.length_remaining -= len(data)
+
+ data = self._decode(data, decode_content, flush_decoder)
+
+ if cache_content:
+ self._body = data
+
+ return data
+
+ def stream(self, amt=2 ** 16, decode_content=None):
+ """
+ A generator wrapper for the read() method. A call will block until
+ ``amt`` bytes have been read from the connection or until the
+ connection is closed.
+
+ :param amt:
+ How much of the content to read. The generator will return up to
+ this much data per iteration, but may return less. This is particularly
+ likely when using compressed data. However, the empty string will
+ never be returned.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+ """
+ if self.chunked and self.supports_chunked_reads():
+ for line in self.read_chunked(amt, decode_content=decode_content):
+ yield line
+ else:
+ while not is_fp_closed(self._fp):
+ data = self.read(amt=amt, decode_content=decode_content)
+
+ if data:
+ yield data
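+
+ # Minimal usage sketch (requires ``preload_content=False`` so the body is
+ # still on the socket; ``handle`` is a placeholder):
+ #
+ #     r = http.request("GET", "http://example.com/", preload_content=False)
+ #     for chunk in r.stream(2 ** 10):
+ #         handle(chunk)
+ #     r.release_conn()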
+
+ @classmethod
+ def from_httplib(ResponseCls, r, **response_kw):
+ """
+ Given an :class:`http.client.HTTPResponse` instance ``r``, return a
+ corresponding :class:`urllib3.response.HTTPResponse` object.
+
+ Remaining parameters are passed to the HTTPResponse constructor, along
+ with ``original_response=r``.
+ """
+ headers = r.msg
+
+ if not isinstance(headers, HTTPHeaderDict):
+ if six.PY2:
+ # Python 2.7
+ headers = HTTPHeaderDict.from_httplib(headers)
+ else:
+ headers = HTTPHeaderDict(headers.items())
+
+ # HTTPResponse objects in Python 3 don't have a .strict attribute
+ strict = getattr(r, "strict", 0)
+ resp = ResponseCls(
+ body=r,
+ headers=headers,
+ status=r.status,
+ version=r.version,
+ reason=r.reason,
+ strict=strict,
+ original_response=r,
+ **response_kw
+ )
+ return resp
+
+ # Backwards-compatibility methods for http.client.HTTPResponse
+ def getheaders(self):
+ return self.headers
+
+ def getheader(self, name, default=None):
+ return self.headers.get(name, default)
+
+ # Backwards compatibility for http.cookiejar
+ def info(self):
+ return self.headers
+
+ # Overrides from io.IOBase
+ def close(self):
+ if not self.closed:
+ self._fp.close()
+
+ if self._connection:
+ self._connection.close()
+
+ if not self.auto_close:
+ io.IOBase.close(self)
+
+ @property
+ def closed(self):
+ if not self.auto_close:
+ return io.IOBase.closed.__get__(self)
+ elif self._fp is None:
+ return True
+ elif hasattr(self._fp, "isclosed"):
+ return self._fp.isclosed()
+ elif hasattr(self._fp, "closed"):
+ return self._fp.closed
+ else:
+ return True
+
+ def fileno(self):
+ if self._fp is None:
+ raise IOError("HTTPResponse has no file to get a fileno from")
+ elif hasattr(self._fp, "fileno"):
+ return self._fp.fileno()
+ else:
+ raise IOError(
+ "The file-like object this HTTPResponse is wrapped "
+ "around has no file descriptor"
+ )
+
+ def flush(self):
+ if (
+ self._fp is not None
+ and hasattr(self._fp, "flush")
+ and not getattr(self._fp, "closed", False)
+ ):
+ return self._fp.flush()
+
+ def readable(self):
+ # This method is required for `io` module compatibility.
+ return True
+
+ def readinto(self, b):
+ # This method is required for `io` module compatibility.
+ temp = self.read(len(b))
+ if len(temp) == 0:
+ return 0
+ else:
+ b[: len(temp)] = temp
+ return len(temp)
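+
+ # Sketch: since HTTPResponse is an ``io.IOBase`` it composes with the
+ # stdlib io layers. With the default ``auto_close`` the response reports
+ # itself closed once exhausted, which makes wrappers raise; disable it to
+ # keep read() returning b"" at EOF instead:
+ #
+ #     r = http.request("GET", "http://example.com/", preload_content=False)
+ #     r.auto_close = False
+ #     for line in io.TextIOWrapper(r):
+ #         print(line)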
+
+ def supports_chunked_reads(self):
+ """
+ Checks if the underlying file-like object looks like a
+ :class:`http.client.HTTPResponse` object. We do this by testing for
+ the fp attribute. If it is present we assume it returns raw chunks as
+ processed by read_chunked().
+ """
+ return hasattr(self._fp, "fp")
+
+ def _update_chunk_length(self):
+ # First, we'll figure out length of a chunk and then
+ # we'll try to read it from socket.
+ if self.chunk_left is not None:
+ return
+ line = self._fp.fp.readline()
+ line = line.split(b";", 1)[0]
+ try:
+ self.chunk_left = int(line, 16)
+ except ValueError:
+ # Invalid chunked protocol response, abort.
+ self.close()
+ raise InvalidChunkLength(self, line)
+
+ def _handle_chunk(self, amt):
+ returned_chunk = None
+ if amt is None:
+ chunk = self._fp._safe_read(self.chunk_left)
+ returned_chunk = chunk
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ elif amt < self.chunk_left:
+ value = self._fp._safe_read(amt)
+ self.chunk_left = self.chunk_left - amt
+ returned_chunk = value
+ elif amt == self.chunk_left:
+ value = self._fp._safe_read(amt)
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ returned_chunk = value
+ else: # amt > self.chunk_left
+ returned_chunk = self._fp._safe_read(self.chunk_left)
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ return returned_chunk
+
+ def read_chunked(self, amt=None, decode_content=None):
+ """
+ Similar to :meth:`HTTPResponse.read`, but with an additional
+ parameter: ``decode_content``.
+
+ :param amt:
+ How much of the content to read. If specified, caching is skipped
+ because it doesn't make sense to cache partial content as the full
+ response.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+ """
+ self._init_decoder()
+ # FIXME: Rewrite this method and make it a class with a better structured logic.
+ if not self.chunked:
+ raise ResponseNotChunked(
+ "Response is not chunked. "
+ "Header 'transfer-encoding: chunked' is missing."
+ )
+ if not self.supports_chunked_reads():
+ raise BodyNotHttplibCompatible(
+ "Body should be http.client.HTTPResponse like. "
+ "It should have have an fp attribute which returns raw chunks."
+ )
+
+ with self._error_catcher():
+ # Don't bother reading the body of a HEAD request.
+ if self._original_response and is_response_to_head(self._original_response):
+ self._original_response.close()
+ return
+
+ # If a response is already read and closed
+ # then return immediately.
+ if self._fp.fp is None:
+ return
+
+ while True:
+ self._update_chunk_length()
+ if self.chunk_left == 0:
+ break
+ chunk = self._handle_chunk(amt)
+ decoded = self._decode(
+ chunk, decode_content=decode_content, flush_decoder=False
+ )
+ if decoded:
+ yield decoded
+
+ if decode_content:
+ # On CPython and PyPy, we should never need to flush the
+ # decoder. However, on Jython we *might* need to, so
+ # lets defensively do it anyway.
+ decoded = self._flush_decoder()
+ if decoded: # Platform-specific: Jython.
+ yield decoded
+
+ # Chunk content ends with \r\n: discard it.
+ while True:
+ line = self._fp.fp.readline()
+ if not line:
+ # Some sites may not end with '\r\n'.
+ break
+ if line == b"\r\n":
+ break
+
+ # We read everything; close the "file".
+ if self._original_response:
+ self._original_response.close()
+
+ def geturl(self):
+ """
+ Returns the URL that was the source of this response.
+ If the request that generated this response redirected, this method
+ will return the final redirect location.
+ """
+ if self.retries is not None and len(self.retries.history):
+ return self.retries.history[-1].redirect_location
+ else:
+ return self._request_url
+
+ def __iter__(self):
+ buffer = []
+ for chunk in self.stream(decode_content=True):
+ if b"\n" in chunk:
+ chunk = chunk.split(b"\n")
+ yield b"".join(buffer) + chunk[0] + b"\n"
+ for x in chunk[1:-1]:
+ yield x + b"\n"
+ if chunk[-1]:
+ buffer = [chunk[-1]]
+ else:
+ buffer = []
+ else:
+ buffer.append(chunk)
+ if buffer:
+ yield b"".join(buffer)
diff --git a/openpype/hosts/fusion/vendor/urllib3/util/__init__.py b/openpype/hosts/fusion/vendor/urllib3/util/__init__.py
new file mode 100644
index 0000000000..4547fc522b
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/util/__init__.py
@@ -0,0 +1,49 @@
+from __future__ import absolute_import
+
+# For backwards compatibility, provide imports that used to be here.
+from .connection import is_connection_dropped
+from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
+from .response import is_fp_closed
+from .retry import Retry
+from .ssl_ import (
+ ALPN_PROTOCOLS,
+ HAS_SNI,
+ IS_PYOPENSSL,
+ IS_SECURETRANSPORT,
+ PROTOCOL_TLS,
+ SSLContext,
+ assert_fingerprint,
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ ssl_wrap_socket,
+)
+from .timeout import Timeout, current_time
+from .url import Url, get_host, parse_url, split_first
+from .wait import wait_for_read, wait_for_write
+
+__all__ = (
+ "HAS_SNI",
+ "IS_PYOPENSSL",
+ "IS_SECURETRANSPORT",
+ "SSLContext",
+ "PROTOCOL_TLS",
+ "ALPN_PROTOCOLS",
+ "Retry",
+ "Timeout",
+ "Url",
+ "assert_fingerprint",
+ "current_time",
+ "is_connection_dropped",
+ "is_fp_closed",
+ "get_host",
+ "parse_url",
+ "make_headers",
+ "resolve_cert_reqs",
+ "resolve_ssl_version",
+ "split_first",
+ "ssl_wrap_socket",
+ "wait_for_read",
+ "wait_for_write",
+ "SKIP_HEADER",
+ "SKIPPABLE_HEADERS",
+)
diff --git a/openpype/hosts/fusion/vendor/urllib3/util/connection.py b/openpype/hosts/fusion/vendor/urllib3/util/connection.py
new file mode 100644
index 0000000000..bdc240c50c
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/util/connection.py
@@ -0,0 +1,150 @@
+from __future__ import absolute_import
+
+import socket
+
+from urllib3.exceptions import LocationParseError
+
+from ..contrib import _appengine_environ
+from ..packages import six
+from .wait import NoWayToWaitForSocketError, wait_for_read
+
+
+def is_connection_dropped(conn): # Platform-specific
+ """
+ Returns True if the connection is dropped and should be closed.
+
+ :param conn:
+ :class:`http.client.HTTPConnection` object.
+
+ Note: For platforms like AppEngine, this will always return ``False`` to
+ let the platform handle connection recycling transparently for us.
+ """
+ sock = getattr(conn, "sock", False)
+ if sock is False: # Platform-specific: AppEngine
+ return False
+ if sock is None: # Connection already closed (such as by httplib).
+ return True
+ try:
+ # Returns True if readable, which here means it's been dropped
+ return wait_for_read(sock, timeout=0.0)
+ except NoWayToWaitForSocketError: # Platform-specific: AppEngine
+ return False
+
+
+# This function is copied from socket.py in the Python 2.7 standard
+# library test suite. The only addition to its signature is `socket_options`.
+# One additional modification is that we avoid binding to IPv6 servers
+# discovered in DNS if the system doesn't have IPv6 functionality.
+def create_connection(
+ address,
+ timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ source_address=None,
+ socket_options=None,
+):
+ """Connect to *address* and return the socket object.
+
+ Convenience function. Connect to *address* (a 2-tuple ``(host,
+ port)``) and return the socket object. Passing the optional
+ *timeout* parameter will set the timeout on the socket instance
+ before attempting to connect. If no *timeout* is supplied, the
+ global default timeout setting returned by :func:`socket.getdefaulttimeout`
+ is used. If *source_address* is set it must be a tuple of (host, port)
+ for the socket to bind as a source address before making the connection.
+ A host of '' or port 0 tells the OS to use the default.
+ """
+
+ host, port = address
+ if host.startswith("["):
+ host = host.strip("[]")
+ err = None
+
+ # Using the value from allowed_gai_family() in the context of getaddrinfo lets
+ # us select whether to work with IPv4 DNS records, IPv6 records, or both.
+ # The original create_connection function always returns all records.
+ family = allowed_gai_family()
+
+ try:
+ host.encode("idna")
+ except UnicodeError:
+ return six.raise_from(
+ LocationParseError(u"'%s', label empty or too long" % host), None
+ )
+
+ for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
+ af, socktype, proto, canonname, sa = res
+ sock = None
+ try:
+ sock = socket.socket(af, socktype, proto)
+
+ # If provided, set socket level options before connecting.
+ _set_socket_options(sock, socket_options)
+
+ if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
+ sock.settimeout(timeout)
+ if source_address:
+ sock.bind(source_address)
+ sock.connect(sa)
+ return sock
+
+ except socket.error as e:
+ err = e
+ if sock is not None:
+ sock.close()
+ sock = None
+
+ if err is not None:
+ raise err
+
+ raise socket.error("getaddrinfo returns an empty list")
+
+
+def _set_socket_options(sock, options):
+ if options is None:
+ return
+
+ for opt in options:
+ sock.setsockopt(*opt)
+
+
+def allowed_gai_family():
+ """This function is designed to work in the context of
+ getaddrinfo, where family=socket.AF_UNSPEC is the default and
+ will perform a DNS search for both IPv6 and IPv4 records."""
+
+ family = socket.AF_INET
+ if HAS_IPV6:
+ family = socket.AF_UNSPEC
+ return family
+
+
+def _has_ipv6(host):
+ """Returns True if the system can bind an IPv6 address."""
+ sock = None
+ has_ipv6 = False
+
+ # App Engine doesn't support IPV6 sockets and actually has a quota on the
+ # number of sockets that can be used, so just early out here instead of
+ # creating a socket needlessly.
+ # See https://github.com/urllib3/urllib3/issues/1446
+ if _appengine_environ.is_appengine_sandbox():
+ return False
+
+ if socket.has_ipv6:
+ # has_ipv6 returns true if cPython was compiled with IPv6 support.
+ # It does not tell us if the system has IPv6 support enabled. To
+ # determine that we must bind to an IPv6 address.
+ # https://github.com/urllib3/urllib3/pull/611
+ # https://bugs.python.org/issue658327
+ try:
+ sock = socket.socket(socket.AF_INET6)
+ sock.bind((host, 0))
+ has_ipv6 = True
+ except Exception:
+ pass
+
+ if sock:
+ sock.close()
+ return has_ipv6
+
+
+HAS_IPV6 = _has_ipv6("::1")
diff --git a/openpype/hosts/fusion/vendor/urllib3/util/proxy.py b/openpype/hosts/fusion/vendor/urllib3/util/proxy.py
new file mode 100644
index 0000000000..34f884d5b3
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/util/proxy.py
@@ -0,0 +1,56 @@
+from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version
+
+
+def connection_requires_http_tunnel(
+ proxy_url=None, proxy_config=None, destination_scheme=None
+):
+ """
+ Returns True if the connection requires an HTTP CONNECT through the proxy.
+
+ :param URL proxy_url:
+ URL of the proxy.
+ :param ProxyConfig proxy_config:
+ Proxy configuration from poolmanager.py
+ :param str destination_scheme:
+ The scheme of the destination. (i.e https, http, etc)
+ """
+ # If we're not using a proxy, no way to use a tunnel.
+ if proxy_url is None:
+ return False
+
+ # HTTP destinations never require tunneling, we always forward.
+ if destination_scheme == "http":
+ return False
+
+ # Support for forwarding with HTTPS proxies and HTTPS destinations.
+ if (
+ proxy_url.scheme == "https"
+ and proxy_config
+ and proxy_config.use_forwarding_for_https
+ ):
+ return False
+
+ # Otherwise always use a tunnel.
+ return True
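+
+# Sketch of the decision above (``parse_url`` from .url builds the Url;
+# proxy address is a placeholder):
+#
+#     connection_requires_http_tunnel(None, None, "https")
+#     # -> False: no proxy, nothing to tunnel
+#     connection_requires_http_tunnel(parse_url("http://proxy:8080"), None, "http")
+#     # -> False: plain HTTP is forwarded through the proxy
+#     connection_requires_http_tunnel(parse_url("http://proxy:8080"), None, "https")
+#     # -> True: HTTPS through an HTTP proxy needs CONNECT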
+
+
+def create_proxy_ssl_context(
+ ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None
+):
+ """
+ Generates a default proxy ssl context if one hasn't been provided by the
+ user.
+ """
+ ssl_context = create_urllib3_context(
+ ssl_version=resolve_ssl_version(ssl_version),
+ cert_reqs=resolve_cert_reqs(cert_reqs),
+ )
+ if (
+ not ca_certs
+ and not ca_cert_dir
+ and not ca_cert_data
+ and hasattr(ssl_context, "load_default_certs")
+ ):
+ ssl_context.load_default_certs()
+
+ return ssl_context
diff --git a/openpype/hosts/fusion/vendor/urllib3/util/queue.py b/openpype/hosts/fusion/vendor/urllib3/util/queue.py
new file mode 100644
index 0000000000..41784104ee
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/util/queue.py
@@ -0,0 +1,22 @@
+import collections
+
+from ..packages import six
+from ..packages.six.moves import queue
+
+if six.PY2:
+ # Queue is imported for side effects on MS Windows. See issue #229.
+ import Queue as _unused_module_Queue # noqa: F401
+
+
+class LifoQueue(queue.Queue):
+ def _init(self, _):
+ self.queue = collections.deque()
+
+ def _qsize(self, len=len):
+ return len(self.queue)
+
+ def _put(self, item):
+ self.queue.append(item)
+
+ def _get(self):
+ return self.queue.pop()
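+
+# Sketch: unlike the FIFO ``queue.Queue``, this pops the most recently put
+# item, so connection pools reuse the most recently returned connection:
+#
+#     q = LifoQueue()
+#     q.put(1)
+#     q.put(2)
+#     q.get()  # -> 2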
diff --git a/openpype/hosts/fusion/vendor/urllib3/util/request.py b/openpype/hosts/fusion/vendor/urllib3/util/request.py
new file mode 100644
index 0000000000..25103383ec
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/util/request.py
@@ -0,0 +1,143 @@
+from __future__ import absolute_import
+
+from base64 import b64encode
+
+from ..exceptions import UnrewindableBodyError
+from ..packages.six import b, integer_types
+
+# Pass as a value within ``headers`` to skip
+# emitting some HTTP headers that are added automatically.
+# The only headers that are supported are ``Accept-Encoding``,
+# ``Host``, and ``User-Agent``.
+SKIP_HEADER = "@@@SKIP_HEADER@@@"
+SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
+
+ACCEPT_ENCODING = "gzip,deflate"
+try:
+ import brotli as _unused_module_brotli # noqa: F401
+except ImportError:
+ pass
+else:
+ ACCEPT_ENCODING += ",br"
+
+_FAILEDTELL = object()
+
+
+def make_headers(
+ keep_alive=None,
+ accept_encoding=None,
+ user_agent=None,
+ basic_auth=None,
+ proxy_basic_auth=None,
+ disable_cache=None,
+):
+ """
+ Shortcuts for generating request headers.
+
+ :param keep_alive:
+ If ``True``, adds 'connection: keep-alive' header.
+
+ :param accept_encoding:
+ Can be a boolean, list, or string.
+ ``True`` translates to 'gzip,deflate'.
+ List will get joined by comma.
+ String will be used as provided.
+
+ :param user_agent:
+ String representing the user-agent you want, such as
+ "python-urllib3/0.6"
+
+ :param basic_auth:
+ Colon-separated username:password string for 'authorization: basic ...'
+ auth header.
+
+ :param proxy_basic_auth:
+ Colon-separated username:password string for 'proxy-authorization: basic ...'
+ auth header.
+
+ :param disable_cache:
+ If ``True``, adds 'cache-control: no-cache' header.
+
+ Example::
+
+ >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
+ {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
+ >>> make_headers(accept_encoding=True)
+ {'accept-encoding': 'gzip,deflate'}
+ """
+ headers = {}
+ if accept_encoding:
+ if isinstance(accept_encoding, str):
+ pass
+ elif isinstance(accept_encoding, list):
+ accept_encoding = ",".join(accept_encoding)
+ else:
+ accept_encoding = ACCEPT_ENCODING
+ headers["accept-encoding"] = accept_encoding
+
+ if user_agent:
+ headers["user-agent"] = user_agent
+
+ if keep_alive:
+ headers["connection"] = "keep-alive"
+
+ if basic_auth:
+ headers["authorization"] = "Basic " + b64encode(b(basic_auth)).decode("utf-8")
+
+ if proxy_basic_auth:
+ headers["proxy-authorization"] = "Basic " + b64encode(
+ b(proxy_basic_auth)
+ ).decode("utf-8")
+
+ if disable_cache:
+ headers["cache-control"] = "no-cache"
+
+ return headers
+
+
+def set_file_position(body, pos):
+ """
+ If a position is provided, move file to that point.
+ Otherwise, we'll attempt to record a position for future use.
+ """
+ if pos is not None:
+ rewind_body(body, pos)
+ elif getattr(body, "tell", None) is not None:
+ try:
+ pos = body.tell()
+ except (IOError, OSError):
+ # This differentiates from None, allowing us to catch
+ # a failed `tell()` later when trying to rewind the body.
+ pos = _FAILEDTELL
+
+ return pos
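+
+# Sketch of the intended pairing (``body`` is a placeholder file-like
+# object): record a position before the first send, then rewind before a
+# redirect/retry resends the body:
+#
+#     pos = set_file_position(body, None)  # remembers body.tell(), if possible
+#     # ... request fails and a retry is decided ...
+#     rewind_body(body, pos)               # seek back before resending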
+
+
+def rewind_body(body, body_pos):
+ """
+ Attempt to rewind body to a certain position.
+ Primarily used for request redirects and retries.
+
+ :param body:
+ File-like object that supports seek.
+
+ :param int body_pos:
+ Position to seek to in file.
+ """
+ body_seek = getattr(body, "seek", None)
+ if body_seek is not None and isinstance(body_pos, integer_types):
+ try:
+ body_seek(body_pos)
+ except (IOError, OSError):
+ raise UnrewindableBodyError(
+ "An error occurred when rewinding request body for redirect/retry."
+ )
+ elif body_pos is _FAILEDTELL:
+ raise UnrewindableBodyError(
+ "Unable to record file position for rewinding "
+ "request body during a redirect/retry."
+ )
+ else:
+ raise ValueError(
+ "body_pos must be of type integer, instead it was %s." % type(body_pos)
+ )
diff --git a/openpype/hosts/fusion/vendor/urllib3/util/response.py b/openpype/hosts/fusion/vendor/urllib3/util/response.py
new file mode 100644
index 0000000000..5ea609cced
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/util/response.py
@@ -0,0 +1,107 @@
+from __future__ import absolute_import
+
+from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
+
+from ..exceptions import HeaderParsingError
+from ..packages.six.moves import http_client as httplib
+
+
+def is_fp_closed(obj):
+ """
+ Checks whether a given file-like object is closed.
+
+ :param obj:
+ The file-like object to check.
+ """
+
+ try:
+ # Check `isclosed()` first, in case Python3 doesn't set `closed`.
+ # GH Issue #928
+ return obj.isclosed()
+ except AttributeError:
+ pass
+
+ try:
+ # Check via the official file-like-object way.
+ return obj.closed
+ except AttributeError:
+ pass
+
+ try:
+ # Check if the object is a container for another file-like object that
+ # gets released on exhaustion (e.g. HTTPResponse).
+ return obj.fp is None
+ except AttributeError:
+ pass
+
+ raise ValueError("Unable to determine whether fp is closed.")
+
+
+def assert_header_parsing(headers):
+ """
+ Asserts whether all headers have been successfully parsed.
+ Extracts encountered errors from the result of parsing headers.
+
+ Only works on Python 3.
+
+ :param http.client.HTTPMessage headers: Headers to verify.
+
+ :raises urllib3.exceptions.HeaderParsingError:
+ If parsing errors are found.
+ """
+
+ # This will fail silently if we pass in the wrong kind of parameter.
+ # To make debugging easier add an explicit check.
+ if not isinstance(headers, httplib.HTTPMessage):
+ raise TypeError("expected httplib.Message, got {0}.".format(type(headers)))
+
+ defects = getattr(headers, "defects", None)
+ get_payload = getattr(headers, "get_payload", None)
+
+ unparsed_data = None
+ if get_payload:
+ # get_payload is actually email.message.Message.get_payload;
+ # we're only interested in the result if it's not a multipart message
+ if not headers.is_multipart():
+ payload = get_payload()
+
+ if isinstance(payload, (bytes, str)):
+ unparsed_data = payload
+ if defects:
+ # httplib is assuming a response body is available
+ # when parsing headers even when httplib only sends
+ # header data to parse_headers(). This results in
+ # defects on multipart responses in particular.
+ # See: https://github.com/urllib3/urllib3/issues/800
+
+ # So we ignore the following defects:
+ # - StartBoundaryNotFoundDefect:
+ # The claimed start boundary was never found.
+ # - MultipartInvariantViolationDefect:
+ # A message claimed to be a multipart but no subparts were found.
+ defects = [
+ defect
+ for defect in defects
+ if not isinstance(
+ defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
+ )
+ ]
+
+ if defects or unparsed_data:
+ raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
+
+
+def is_response_to_head(response):
+ """
+ Checks whether the request of a response has been a HEAD-request.
+ Handles the quirks of AppEngine.
+
+ :param http.client.HTTPResponse response:
+ Response to check if the originating request
+ used 'HEAD' as a method.
+ """
+ # FIXME: Can we do this somehow without accessing private httplib _method?
+ method = response._method
+ if isinstance(method, int): # Platform-specific: Appengine
+ return method == 3
+ return method.upper() == "HEAD"
diff --git a/openpype/hosts/fusion/vendor/urllib3/util/retry.py b/openpype/hosts/fusion/vendor/urllib3/util/retry.py
new file mode 100644
index 0000000000..c7dc42f1d6
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/util/retry.py
@@ -0,0 +1,602 @@
+from __future__ import absolute_import
+
+import email
+import logging
+import re
+import time
+import warnings
+from collections import namedtuple
+from itertools import takewhile
+
+from ..exceptions import (
+ ConnectTimeoutError,
+ InvalidHeader,
+ MaxRetryError,
+ ProtocolError,
+ ProxyError,
+ ReadTimeoutError,
+ ResponseError,
+)
+from ..packages import six
+
+log = logging.getLogger(__name__)
+
+
+# Data structure for representing the metadata of requests that result in a retry.
+RequestHistory = namedtuple(
+ "RequestHistory", ["method", "url", "error", "status", "redirect_location"]
+)
+
+
+# TODO: In v2 we can remove this sentinel and metaclass with deprecated options.
+_Default = object()
+
+
+class _RetryMeta(type):
+ @property
+ def DEFAULT_METHOD_WHITELIST(cls):
+ warnings.warn(
+ "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
+ DeprecationWarning,
+ )
+ return cls.DEFAULT_ALLOWED_METHODS
+
+ @DEFAULT_METHOD_WHITELIST.setter
+ def DEFAULT_METHOD_WHITELIST(cls, value):
+ warnings.warn(
+ "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
+ DeprecationWarning,
+ )
+ cls.DEFAULT_ALLOWED_METHODS = value
+
+ @property
+ def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls):
+ warnings.warn(
+ "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
+ DeprecationWarning,
+ )
+ return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
+
+ @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter
+ def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
+ warnings.warn(
+ "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
+ DeprecationWarning,
+ )
+ cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
+
+
+@six.add_metaclass(_RetryMeta)
+class Retry(object):
+ """Retry configuration.
+
+ Each retry attempt will create a new Retry object with updated values, so
+ they can be safely reused.
+
+ Retries can be defined as a default for a pool::
+
+ retries = Retry(connect=5, read=2, redirect=5)
+ http = PoolManager(retries=retries)
+ response = http.request('GET', 'http://example.com/')
+
+ Or per-request (which overrides the default for the pool)::
+
+ response = http.request('GET', 'http://example.com/', retries=Retry(10))
+
+ Retries can be disabled by passing ``False``::
+
+ response = http.request('GET', 'http://example.com/', retries=False)
+
+ Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
+ retries are disabled, in which case the causing exception will be raised.
+
+ :param int total:
+ Total number of retries to allow. Takes precedence over other counts.
+
+ Set to ``None`` to remove this constraint and fall back on other
+ counts.
+
+ Set to ``0`` to fail on the first retry.
+
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+ :param int connect:
+ How many connection-related errors to retry on.
+
+ These are errors raised before the request is sent to the remote server,
+ which we assume has not triggered the server to process the request.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int read:
+ How many times to retry on read errors.
+
+ These errors are raised after the request was sent to the server, so the
+ request may have side-effects.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int redirect:
+ How many redirects to perform. Limit this to avoid infinite redirect
+ loops.
+
+ A redirect is an HTTP response with a status code 301, 302, 303, 307 or
+ 308.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+
+ :param int status:
+ How many times to retry on bad status codes.
+
+ These are retries made on responses, where status code matches
+ ``status_forcelist``.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ :param int other:
+ How many times to retry on other errors.
+
+ Other errors are errors that are not connect, read, redirect or status errors.
+ These errors might be raised after the request was sent to the server, so the
+ request might have side-effects.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ If ``total`` is not set, it's a good idea to set this to 0 to account
+ for unexpected edge cases and avoid infinite retry loops.
+
+ :param iterable allowed_methods:
+ Set of uppercased HTTP method verbs that we should retry on.
+
+ By default, we only retry on methods which are considered to be
+ idempotent (multiple requests with the same parameters end with the
+ same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.
+
+ Set to a ``False`` value to retry on any verb.
+
+ .. warning::
+
+ Previously this parameter was named ``method_whitelist``; that
+ usage is deprecated in v1.26.0 and will be removed in v2.0.
+
+ :param iterable status_forcelist:
+ A set of integer HTTP status codes that we should force a retry on.
+ A retry is initiated if the request method is in ``allowed_methods``
+ and the response status code is in ``status_forcelist``.
+
+ By default, this is disabled with ``None``.
+
+ :param float backoff_factor:
+ A backoff factor to apply between attempts after the second try
+ (most errors are resolved immediately by a second try without a
+ delay). urllib3 will sleep for::
+
+ {backoff factor} * (2 ** ({number of total retries} - 1))
+
+ seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
+ for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
+ than :attr:`Retry.BACKOFF_MAX`.
+
+ By default, backoff is disabled (set to 0).
+
+ :param bool raise_on_redirect: Whether, if the number of redirects is
+ exhausted, to raise a MaxRetryError, or to return a response with a
+ response code in the 3xx range.
+
+ :param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
+ whether we should raise an exception, or return a response,
+ if status falls in ``status_forcelist`` range and retries have
+ been exhausted.
+
+ :param tuple history: The history of the request encountered during
+ each call to :meth:`~Retry.increment`. The list is in the order
+ the requests occurred. Each list item is of class :class:`RequestHistory`.
+
+ :param bool respect_retry_after_header:
+ Whether to respect Retry-After header on status codes defined as
+ :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.
+
+ :param iterable remove_headers_on_redirect:
+ Sequence of headers to remove from the request when a response
+ indicating a redirect is returned before firing off the redirected
+ request.
+ """
+
+ #: Default methods to be used for ``allowed_methods``
+ DEFAULT_ALLOWED_METHODS = frozenset(
+ ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
+ )
+
+ #: Status codes for which a ``Retry-After`` response header is respected
+ RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
+
+ #: Default headers to be used for ``remove_headers_on_redirect``
+ DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
+
+ #: Maximum backoff time.
+ BACKOFF_MAX = 120
+
+ def __init__(
+ self,
+ total=10,
+ connect=None,
+ read=None,
+ redirect=None,
+ status=None,
+ other=None,
+ allowed_methods=_Default,
+ status_forcelist=None,
+ backoff_factor=0,
+ raise_on_redirect=True,
+ raise_on_status=True,
+ history=None,
+ respect_retry_after_header=True,
+ remove_headers_on_redirect=_Default,
+ # TODO: Deprecated, remove in v2.0
+ method_whitelist=_Default,
+ ):
+
+ if method_whitelist is not _Default:
+ if allowed_methods is not _Default:
+ raise ValueError(
+ "Using both 'allowed_methods' and "
+ "'method_whitelist' together is not allowed. "
+ "Instead only use 'allowed_methods'"
+ )
+ warnings.warn(
+ "Using 'method_whitelist' with Retry is deprecated and "
+ "will be removed in v2.0. Use 'allowed_methods' instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ allowed_methods = method_whitelist
+ if allowed_methods is _Default:
+ allowed_methods = self.DEFAULT_ALLOWED_METHODS
+ if remove_headers_on_redirect is _Default:
+ remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
+
+ self.total = total
+ self.connect = connect
+ self.read = read
+ self.status = status
+ self.other = other
+
+ if redirect is False or total is False:
+ redirect = 0
+ raise_on_redirect = False
+
+ self.redirect = redirect
+ self.status_forcelist = status_forcelist or set()
+ self.allowed_methods = allowed_methods
+ self.backoff_factor = backoff_factor
+ self.raise_on_redirect = raise_on_redirect
+ self.raise_on_status = raise_on_status
+ self.history = history or tuple()
+ self.respect_retry_after_header = respect_retry_after_header
+ self.remove_headers_on_redirect = frozenset(
+ [h.lower() for h in remove_headers_on_redirect]
+ )
+
+ def new(self, **kw):
+ params = dict(
+ total=self.total,
+ connect=self.connect,
+ read=self.read,
+ redirect=self.redirect,
+ status=self.status,
+ other=self.other,
+ status_forcelist=self.status_forcelist,
+ backoff_factor=self.backoff_factor,
+ raise_on_redirect=self.raise_on_redirect,
+ raise_on_status=self.raise_on_status,
+ history=self.history,
+ remove_headers_on_redirect=self.remove_headers_on_redirect,
+ respect_retry_after_header=self.respect_retry_after_header,
+ )
+
+ # TODO: If already given in **kw we use what's given to us
+ # If not given we need to figure out what to pass. We decide
+ # based on whether our class has the 'method_whitelist' property
+ # and if so we pass the deprecated 'method_whitelist' otherwise
+ # we use 'allowed_methods'. Remove in v2.0
+ if "method_whitelist" not in kw and "allowed_methods" not in kw:
+ if "method_whitelist" in self.__dict__:
+ warnings.warn(
+ "Using 'method_whitelist' with Retry is deprecated and "
+ "will be removed in v2.0. Use 'allowed_methods' instead",
+ DeprecationWarning,
+ )
+ params["method_whitelist"] = self.allowed_methods
+ else:
+ params["allowed_methods"] = self.allowed_methods
+
+ params.update(kw)
+ return type(self)(**params)
+
+ @classmethod
+ def from_int(cls, retries, redirect=True, default=None):
+ """Backwards-compatibility for the old retries format."""
+ if retries is None:
+ retries = default if default is not None else cls.DEFAULT
+
+ if isinstance(retries, Retry):
+ return retries
+
+ redirect = bool(redirect) and None
+ new_retries = cls(retries, redirect=redirect)
+ log.debug("Converted retries value: %r -> %r", retries, new_retries)
+ return new_retries
+
+ def get_backoff_time(self):
+ """Formula for computing the current backoff
+
+ :rtype: float
+ """
+ # We want to consider only the last consecutive sequence of errors (ignoring redirects).
+ consecutive_errors_len = len(
+ list(
+ takewhile(lambda x: x.redirect_location is None, reversed(self.history))
+ )
+ )
+ if consecutive_errors_len <= 1:
+ return 0
+
+ backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
+ return min(self.BACKOFF_MAX, backoff_value)
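+
+ # Sketch of the arithmetic: with backoff_factor=0.5 and 1, 2, 3, 4
+ # consecutive errors the sleeps are 0, 1.0, 2.0, 4.0 seconds
+ # (0.5 * 2 ** (n - 1), zero for n <= 1, capped at BACKOFF_MAX).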
+
+ def parse_retry_after(self, retry_after):
+ # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
+ if re.match(r"^\s*[0-9]+\s*$", retry_after):
+ seconds = int(retry_after)
+ else:
+ retry_date_tuple = email.utils.parsedate_tz(retry_after)
+ if retry_date_tuple is None:
+ raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
+ if retry_date_tuple[9] is None: # Python 2
+ # Assume UTC if no timezone was specified
+ # On Python2.7, parsedate_tz returns None for a timezone offset
+ # instead of 0 if no timezone is given, where mktime_tz treats
+ # a None timezone offset as local time.
+ retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:]
+
+ retry_date = email.utils.mktime_tz(retry_date_tuple)
+ seconds = retry_date - time.time()
+
+ if seconds < 0:
+ seconds = 0
+
+ return seconds
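+
+ # Sketch: both Retry-After forms from RFC 7231 are handled (``retry`` is
+ # any Retry instance):
+ #
+ #     retry.parse_retry_after("120")
+ #     # -> 120 (delta-seconds form)
+ #     retry.parse_retry_after("Fri, 31 Dec 1999 23:59:59 GMT")
+ #     # -> HTTP-date minus time.time(), clamped to a minimum of 0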
+
+ def get_retry_after(self, response):
+ """Get the value of Retry-After in seconds."""
+
+ retry_after = response.getheader("Retry-After")
+
+ if retry_after is None:
+ return None
+
+ return self.parse_retry_after(retry_after)
+
+ def sleep_for_retry(self, response=None):
+ retry_after = self.get_retry_after(response)
+ if retry_after:
+ time.sleep(retry_after)
+ return True
+
+ return False
+
+ def _sleep_backoff(self):
+ backoff = self.get_backoff_time()
+ if backoff <= 0:
+ return
+ time.sleep(backoff)
+
+ def sleep(self, response=None):
+ """Sleep between retry attempts.
+
+ This method will respect a server's ``Retry-After`` response header
+ and sleep the duration of the time requested. If that is not present, it
+ will use an exponential backoff. By default, the backoff factor is 0 and
+ this method will return immediately.
+ """
+
+ if self.respect_retry_after_header and response:
+ slept = self.sleep_for_retry(response)
+ if slept:
+ return
+
+ self._sleep_backoff()
+
+ def _is_connection_error(self, err):
+ """Errors when we're fairly sure that the server did not receive the
+ request, so it should be safe to retry.
+ """
+ if isinstance(err, ProxyError):
+ err = err.original_error
+ return isinstance(err, ConnectTimeoutError)
+
+ def _is_read_error(self, err):
+ """Errors that occur after the request has been started, so we should
+ assume that the server began processing it.
+ """
+ return isinstance(err, (ReadTimeoutError, ProtocolError))
+
+ def _is_method_retryable(self, method):
+ """Checks if a given HTTP method should be retried upon, depending if
+ it is included in the allowed_methods
+ """
+ # TODO: For now favor if the Retry implementation sets its own method_whitelist
+ # property outside of our constructor to avoid breaking custom implementations.
+ if "method_whitelist" in self.__dict__:
+ warnings.warn(
+ "Using 'method_whitelist' with Retry is deprecated and "
+ "will be removed in v2.0. Use 'allowed_methods' instead",
+ DeprecationWarning,
+ )
+ allowed_methods = self.method_whitelist
+ else:
+ allowed_methods = self.allowed_methods
+
+ if allowed_methods and method.upper() not in allowed_methods:
+ return False
+ return True
+
+ def is_retry(self, method, status_code, has_retry_after=False):
+ """Is this method/status code retryable? (Based on allowlists and control
+ variables such as the number of total retries to allow, whether to
+ respect the Retry-After header, whether this header is present, and
+ whether the returned status code is on the list of status codes to
+ be retried upon when that header is present)
+ """
+ if not self._is_method_retryable(method):
+ return False
+
+ if self.status_forcelist and status_code in self.status_forcelist:
+ return True
+
+ return (
+ self.total
+ and self.respect_retry_after_header
+ and has_retry_after
+ and (status_code in self.RETRY_AFTER_STATUS_CODES)
+ )
+
+ def is_exhausted(self):
+ """Are we out of retries?"""
+ retry_counts = (
+ self.total,
+ self.connect,
+ self.read,
+ self.redirect,
+ self.status,
+ self.other,
+ )
+ retry_counts = list(filter(None, retry_counts))
+ if not retry_counts:
+ return False
+
+ return min(retry_counts) < 0
+
+ def increment(
+ self,
+ method=None,
+ url=None,
+ response=None,
+ error=None,
+ _pool=None,
+ _stacktrace=None,
+ ):
+ """Return a new Retry object with incremented retry counters.
+
+ :param response: A response object, or None, if the server did not
+ return a response.
+ :type response: :class:`~urllib3.response.HTTPResponse`
+ :param Exception error: An error encountered during the request, or
+ None if the response was received successfully.
+
+ :return: A new ``Retry`` object.
+ """
+ if self.total is False and error:
+ # Disabled, indicate to re-raise the error.
+ raise six.reraise(type(error), error, _stacktrace)
+
+ total = self.total
+ if total is not None:
+ total -= 1
+
+ connect = self.connect
+ read = self.read
+ redirect = self.redirect
+ status_count = self.status
+ other = self.other
+ cause = "unknown"
+ status = None
+ redirect_location = None
+
+ if error and self._is_connection_error(error):
+ # Connect retry?
+ if connect is False:
+ raise six.reraise(type(error), error, _stacktrace)
+ elif connect is not None:
+ connect -= 1
+
+ elif error and self._is_read_error(error):
+ # Read retry?
+ if read is False or not self._is_method_retryable(method):
+ raise six.reraise(type(error), error, _stacktrace)
+ elif read is not None:
+ read -= 1
+
+ elif error:
+ # Other retry?
+ if other is not None:
+ other -= 1
+
+ elif response and response.get_redirect_location():
+ # Redirect retry?
+ if redirect is not None:
+ redirect -= 1
+ cause = "too many redirects"
+ redirect_location = response.get_redirect_location()
+ status = response.status
+
+ else:
+ # Incrementing because of a server error like a 500 in
+ # status_forcelist and the given method is in the allowed_methods
+ cause = ResponseError.GENERIC_ERROR
+ if response and response.status:
+ if status_count is not None:
+ status_count -= 1
+ cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)
+ status = response.status
+
+ history = self.history + (
+ RequestHistory(method, url, error, status, redirect_location),
+ )
+
+ new_retry = self.new(
+ total=total,
+ connect=connect,
+ read=read,
+ redirect=redirect,
+ status=status_count,
+ other=other,
+ history=history,
+ )
+
+ if new_retry.is_exhausted():
+ raise MaxRetryError(_pool, url, error or ResponseError(cause))
+
+ log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)
+
+ return new_retry
+
+ def __repr__(self):
+ return (
+ "{cls.__name__}(total={self.total}, connect={self.connect}, "
+ "read={self.read}, redirect={self.redirect}, status={self.status})"
+ ).format(cls=type(self), self=self)
+
+ def __getattr__(self, item):
+ if item == "method_whitelist":
+ # TODO: Remove this deprecated alias in v2.0
+ warnings.warn(
+ "Using 'method_whitelist' with Retry is deprecated and "
+ "will be removed in v2.0. Use 'allowed_methods' instead",
+ DeprecationWarning,
+ )
+ return self.allowed_methods
+ try:
+ return getattr(super(Retry, self), item)
+ except AttributeError:
+ return getattr(Retry, item)
+
+
+# For backwards compatibility (equivalent to pre-v1.9):
+Retry.DEFAULT = Retry(3)
diff --git a/openpype/hosts/fusion/vendor/urllib3/util/ssl_.py b/openpype/hosts/fusion/vendor/urllib3/util/ssl_.py
new file mode 100644
index 0000000000..8f867812a5
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/util/ssl_.py
@@ -0,0 +1,495 @@
+from __future__ import absolute_import
+
+import hmac
+import os
+import sys
+import warnings
+from binascii import hexlify, unhexlify
+from hashlib import md5, sha1, sha256
+
+from ..exceptions import (
+ InsecurePlatformWarning,
+ ProxySchemeUnsupported,
+ SNIMissingWarning,
+ SSLError,
+)
+from ..packages import six
+from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE
+
+SSLContext = None
+SSLTransport = None
+HAS_SNI = False
+IS_PYOPENSSL = False
+IS_SECURETRANSPORT = False
+ALPN_PROTOCOLS = ["http/1.1"]
+
+# Maps the length of a digest to a possible hash function producing this digest
+HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
+
+
+def _const_compare_digest_backport(a, b):
+ """
+ Compare two digests of equal length in constant time.
+
+ The digests must be of type str/bytes.
+ Returns True if the digests match, and False otherwise.
+ """
+ result = abs(len(a) - len(b))
+ for left, right in zip(bytearray(a), bytearray(b)):
+ result |= left ^ right
+ return result == 0
+
+
+_const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport)
+
+try: # Test for SSL features
+ import ssl
+ from ssl import CERT_REQUIRED, wrap_socket
+except ImportError:
+ pass
+
+try:
+ from ssl import HAS_SNI # Has SNI?
+except ImportError:
+ pass
+
+try:
+ from .ssltransport import SSLTransport
+except ImportError:
+ pass
+
+
+try: # Platform-specific: Python 3.6
+ from ssl import PROTOCOL_TLS
+
+ PROTOCOL_SSLv23 = PROTOCOL_TLS
+except ImportError:
+ try:
+ from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS
+
+ PROTOCOL_SSLv23 = PROTOCOL_TLS
+ except ImportError:
+ PROTOCOL_SSLv23 = PROTOCOL_TLS = 2
+
+try:
+ from ssl import PROTOCOL_TLS_CLIENT
+except ImportError:
+ PROTOCOL_TLS_CLIENT = PROTOCOL_TLS
+
+
+try:
+ from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
+except ImportError:
+ OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
+ OP_NO_COMPRESSION = 0x20000
+
+
+try: # OP_NO_TICKET was added in Python 3.6
+ from ssl import OP_NO_TICKET
+except ImportError:
+ OP_NO_TICKET = 0x4000
+
+
+# A secure default.
+# Sources for more information on TLS ciphers:
+#
+# - https://wiki.mozilla.org/Security/Server_Side_TLS
+# - https://www.ssllabs.com/projects/best-practices/index.html
+# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
+#
+# The general intent is:
+# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
+# - prefer ECDHE over DHE for better performance,
+# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and
+# security,
+# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common,
+# - disable NULL authentication, MD5 MACs, DSS, and other
+# insecure ciphers for security reasons.
+# - NOTE: TLS 1.3 cipher suites are managed through a different interface
+# not exposed by CPython (yet!) and are enabled by default if they're available.
+DEFAULT_CIPHERS = ":".join(
+ [
+ "ECDHE+AESGCM",
+ "ECDHE+CHACHA20",
+ "DHE+AESGCM",
+ "DHE+CHACHA20",
+ "ECDH+AESGCM",
+ "DH+AESGCM",
+ "ECDH+AES",
+ "DH+AES",
+ "RSA+AESGCM",
+ "RSA+AES",
+ "!aNULL",
+ "!eNULL",
+ "!MD5",
+ "!DSS",
+ ]
+)
+
+try:
+ from ssl import SSLContext # Modern SSL?
+except ImportError:
+
+ class SSLContext(object): # Platform-specific: Python 2
+ def __init__(self, protocol_version):
+ self.protocol = protocol_version
+ # Use default values from a real SSLContext
+ self.check_hostname = False
+ self.verify_mode = ssl.CERT_NONE
+ self.ca_certs = None
+ self.options = 0
+ self.certfile = None
+ self.keyfile = None
+ self.ciphers = None
+
+ def load_cert_chain(self, certfile, keyfile):
+ self.certfile = certfile
+ self.keyfile = keyfile
+
+ def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+ self.ca_certs = cafile
+
+ if capath is not None:
+ raise SSLError("CA directories not supported in older Pythons")
+
+ if cadata is not None:
+ raise SSLError("CA data not supported in older Pythons")
+
+ def set_ciphers(self, cipher_suite):
+ self.ciphers = cipher_suite
+
+ def wrap_socket(self, socket, server_hostname=None, server_side=False):
+ warnings.warn(
+ "A true SSLContext object is not available. This prevents "
+ "urllib3 from configuring SSL appropriately and may cause "
+ "certain SSL connections to fail. You can upgrade to a newer "
+ "version of Python to solve this. For more information, see "
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
+ "#ssl-warnings",
+ InsecurePlatformWarning,
+ )
+ kwargs = {
+ "keyfile": self.keyfile,
+ "certfile": self.certfile,
+ "ca_certs": self.ca_certs,
+ "cert_reqs": self.verify_mode,
+ "ssl_version": self.protocol,
+ "server_side": server_side,
+ }
+ return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
+
+
+def assert_fingerprint(cert, fingerprint):
+ """
+ Checks if given fingerprint matches the supplied certificate.
+
+ :param cert:
+ Certificate as bytes object.
+ :param fingerprint:
+ Fingerprint as string of hexdigits, can be interspersed by colons.
+ """
+
+ fingerprint = fingerprint.replace(":", "").lower()
+ digest_length = len(fingerprint)
+ hashfunc = HASHFUNC_MAP.get(digest_length)
+ if not hashfunc:
+ raise SSLError("Fingerprint of invalid length: {0}".format(fingerprint))
+
+ # We need encode() here for py32; works on py2 and py3.
+ fingerprint_bytes = unhexlify(fingerprint.encode())
+
+ cert_digest = hashfunc(cert).digest()
+
+ if not _const_compare_digest(cert_digest, fingerprint_bytes):
+ raise SSLError(
+ 'Fingerprints did not match. Expected "{0}", got "{1}".'.format(
+ fingerprint, hexlify(cert_digest)
+ )
+ )
+
+
+def resolve_cert_reqs(candidate):
+ """
+ Resolves the argument to a numeric constant, which can be passed to
+ the wrap_socket function/method from the ssl module.
+ Defaults to :data:`ssl.CERT_REQUIRED`.
+ If given a string it is assumed to be the name of the constant in the
+ :mod:`ssl` module or its abbreviation.
+ (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
+ If it's neither `None` nor a string we assume it is already the numeric
+ constant which can directly be passed to wrap_socket.
+ """
+ if candidate is None:
+ return CERT_REQUIRED
+
+ if isinstance(candidate, str):
+ res = getattr(ssl, candidate, None)
+ if res is None:
+ res = getattr(ssl, "CERT_" + candidate)
+ return res
+
+ return candidate
+
+
+def resolve_ssl_version(candidate):
+ """
+ Like resolve_cert_reqs, but resolves a protocol version name
+ (e.g. "TLSv1_2") to the matching ssl.PROTOCOL_* constant.
+ """
+ if candidate is None:
+ return PROTOCOL_TLS
+
+ if isinstance(candidate, str):
+ res = getattr(ssl, candidate, None)
+ if res is None:
+ res = getattr(ssl, "PROTOCOL_" + candidate)
+ return res
+
+ return candidate
+
+
+def create_urllib3_context(
+ ssl_version=None, cert_reqs=None, options=None, ciphers=None
+):
+ """All arguments have the same meaning as ``ssl_wrap_socket``.
+
+ By default, this function does a lot of the same work that
+ ``ssl.create_default_context`` does on Python 3.4+. It:
+
+ - Disables SSLv2, SSLv3, and compression
+ - Sets a restricted set of server ciphers
+
+ If you wish to enable SSLv3, you can do::
+
+ from urllib3.util import ssl_
+ context = ssl_.create_urllib3_context()
+ context.options &= ~ssl_.OP_NO_SSLv3
+
+ You can do the same to enable compression (substituting ``COMPRESSION``
+ for ``SSLv3`` in the last line above).
+
+ :param ssl_version:
+ The desired protocol version to use. This will default to
+ PROTOCOL_SSLv23 which will negotiate the highest protocol that both
+ the server and your installation of OpenSSL support.
+ :param cert_reqs:
+ Whether to require the certificate verification. This defaults to
+ ``ssl.CERT_REQUIRED``.
+ :param options:
+ Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
+ ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
+ :param ciphers:
+ Which cipher suites to allow the server to select.
+ :returns:
+ Constructed SSLContext object with specified options
+ :rtype: SSLContext
+ """
+ # PROTOCOL_TLS is deprecated in Python 3.10
+ if not ssl_version or ssl_version == PROTOCOL_TLS:
+ ssl_version = PROTOCOL_TLS_CLIENT
+
+ context = SSLContext(ssl_version)
+
+ context.set_ciphers(ciphers or DEFAULT_CIPHERS)
+
+ # Setting the default here, as we may have no ssl module on import
+ cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
+
+ if options is None:
+ options = 0
+ # SSLv2 is easily broken and is considered harmful and dangerous
+ options |= OP_NO_SSLv2
+ # SSLv3 has several problems and is now dangerous
+ options |= OP_NO_SSLv3
+ # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
+ # (issue #309)
+ options |= OP_NO_COMPRESSION
+ # TLSv1.2 only. Unless set explicitly, do not request tickets.
+ # This may save some bandwidth on the wire, and although the ticket is
+ # encrypted, there is a risk associated with it going over the wire
+ # if the server is not rotating its ticketing keys properly.
+ options |= OP_NO_TICKET
+
+ context.options |= options
+
+ # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is
+ # necessary for conditional client cert authentication with TLS 1.3.
+ # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older
+ # versions of Python. We only enable on Python 3.7.4+ or if certificate
+ # verification is enabled to work around Python issue #37428
+ # See: https://bugs.python.org/issue37428
+ if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr(
+ context, "post_handshake_auth", None
+ ) is not None:
+ context.post_handshake_auth = True
+
+ def disable_check_hostname():
+ if (
+ getattr(context, "check_hostname", None) is not None
+ ): # Platform-specific: Python 3.2
+ # We do our own verification, including fingerprints and alternative
+ # hostnames. So disable it here
+ context.check_hostname = False
+
+ # The order of the below lines setting verify_mode and check_hostname
+ # matters due to the safeguards SSLContext has to prevent an SSLContext with
+ # check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more
+ # complex because we don't know whether PROTOCOL_TLS_CLIENT will be used
+ # or not so we don't know the initial state of the freshly created SSLContext.
+ if cert_reqs == ssl.CERT_REQUIRED:
+ context.verify_mode = cert_reqs
+ disable_check_hostname()
+ else:
+ disable_check_hostname()
+ context.verify_mode = cert_reqs
+
+ # Enable logging of TLS session keys via defacto standard environment variable
+ # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
+ if hasattr(context, "keylog_filename"):
+ sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
+ if sslkeylogfile:
+ context.keylog_filename = sslkeylogfile
+
+ return context
+
+
+def ssl_wrap_socket(
+ sock,
+ keyfile=None,
+ certfile=None,
+ cert_reqs=None,
+ ca_certs=None,
+ server_hostname=None,
+ ssl_version=None,
+ ciphers=None,
+ ssl_context=None,
+ ca_cert_dir=None,
+ key_password=None,
+ ca_cert_data=None,
+ tls_in_tls=False,
+):
+ """
+ All arguments except for server_hostname, ssl_context, and ca_cert_dir have
+ the same meaning as they do when using :func:`ssl.wrap_socket`.
+
+ :param server_hostname:
+ When SNI is supported, the expected hostname of the certificate
+ :param ssl_context:
+ A pre-made :class:`SSLContext` object. If none is provided, one will
+ be created using :func:`create_urllib3_context`.
+ :param ciphers:
+ A string of ciphers we wish the client to support.
+ :param ca_cert_dir:
+ A directory containing CA certificates in multiple separate files, as
+ supported by OpenSSL's -CApath flag or the capath argument to
+ SSLContext.load_verify_locations().
+ :param key_password:
+ Optional password if the keyfile is encrypted.
+ :param ca_cert_data:
+ Optional string containing CA certificates in PEM format suitable for
+ passing as the cadata parameter to SSLContext.load_verify_locations()
+ :param tls_in_tls:
+ Use SSLTransport to wrap the existing socket.
+ """
+ context = ssl_context
+ if context is None:
+ # Note: This branch of code and all the variables in it are no longer
+ # used by urllib3 itself. We should consider deprecating and removing
+ # this code.
+ context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
+
+ if ca_certs or ca_cert_dir or ca_cert_data:
+ try:
+ context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
+ except (IOError, OSError) as e:
+ raise SSLError(e)
+
+ elif ssl_context is None and hasattr(context, "load_default_certs"):
+ # try to load OS default certs; works well on Windows (requires Python 3.4+)
+ context.load_default_certs()
+
+ # Attempt to detect if we get the goofy behavior of the
+ # keyfile being encrypted and OpenSSL asking for the
+ # passphrase via the terminal and instead error out.
+ if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
+ raise SSLError("Client private key is encrypted, password is required")
+
+ if certfile:
+ if key_password is None:
+ context.load_cert_chain(certfile, keyfile)
+ else:
+ context.load_cert_chain(certfile, keyfile, key_password)
+
+ try:
+ if hasattr(context, "set_alpn_protocols"):
+ context.set_alpn_protocols(ALPN_PROTOCOLS)
+ except NotImplementedError: # Defensive: in CI, we always have set_alpn_protocols
+ pass
+
+ # If we detect server_hostname is an IP address then the SNI
+ # extension should not be used according to RFC3546 Section 3.1
+ use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
+ # SecureTransport uses server_hostname in certificate verification.
+ send_sni = (use_sni_hostname and HAS_SNI) or (
+ IS_SECURETRANSPORT and server_hostname
+ )
+ # Do not warn the user if server_hostname is an invalid SNI hostname.
+ if not HAS_SNI and use_sni_hostname:
+ warnings.warn(
+ "An HTTPS request has been made, but the SNI (Server Name "
+ "Indication) extension to TLS is not available on this platform. "
+ "This may cause the server to present an incorrect TLS "
+ "certificate, which can cause validation failures. You can upgrade to "
+ "a newer version of Python to solve this. For more information, see "
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
+ "#ssl-warnings",
+ SNIMissingWarning,
+ )
+
+ if send_sni:
+ ssl_sock = _ssl_wrap_socket_impl(
+ sock, context, tls_in_tls, server_hostname=server_hostname
+ )
+ else:
+ ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls)
+ return ssl_sock
+
+
+def is_ipaddress(hostname):
+ """Detects whether the hostname given is an IPv4 or IPv6 address.
+ Also detects IPv6 addresses with Zone IDs.
+
+ :param str hostname: Hostname to examine.
+ :return: True if the hostname is an IP address, False otherwise.
+ """
+ if not six.PY2 and isinstance(hostname, bytes):
+ # IDN A-label bytes are ASCII compatible.
+ hostname = hostname.decode("ascii")
+ return bool(IPV4_RE.match(hostname) or BRACELESS_IPV6_ADDRZ_RE.match(hostname))
+
+
+def _is_key_file_encrypted(key_file):
+ """Detects if a key file is encrypted or not."""
+ with open(key_file, "r") as f:
+ for line in f:
+ # Look for Proc-Type: 4,ENCRYPTED
+ if "ENCRYPTED" in line:
+ return True
+
+ return False
+
+
+def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
+ if tls_in_tls:
+ if not SSLTransport:
+ # Import error, ssl is not available.
+ raise ProxySchemeUnsupported(
+ "TLS in TLS requires support for the 'ssl' module"
+ )
+
+ SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
+ return SSLTransport(sock, ssl_context, server_hostname)
+
+ if server_hostname:
+ return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
+ else:
+ return ssl_context.wrap_socket(sock)
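As a rough illustration (not part of the vendored file itself), `create_urllib3_context` can also be used directly to build a hardened client context; the commented option toggle mirrors the function's docstring, and the plain `urllib3` import path is an assumption as above.

```python
# Sketch only: build a context with the vendored helper and check its
# defaults. Toggling ssl_.OP_NO_SSLv3 follows the docstring example.
import ssl
from urllib3.util import ssl_

ctx = ssl_.create_urllib3_context(cert_reqs=ssl.CERT_REQUIRED)
assert ctx.verify_mode == ssl.CERT_REQUIRED
# Only if you really must talk to a legacy SSLv3 peer (not recommended):
# ctx.options &= ~ssl_.OP_NO_SSLv3
```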
diff --git a/openpype/hosts/fusion/vendor/urllib3/util/ssltransport.py b/openpype/hosts/fusion/vendor/urllib3/util/ssltransport.py
new file mode 100644
index 0000000000..c2186bced9
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/util/ssltransport.py
@@ -0,0 +1,221 @@
+import io
+import socket
+import ssl
+
+from urllib3.exceptions import ProxySchemeUnsupported
+from urllib3.packages import six
+
+SSL_BLOCKSIZE = 16384
+
+
+class SSLTransport:
+ """
+ The SSLTransport wraps an existing socket and establishes an SSL connection.
+
+ Unlike Python's implementation of SSLSocket, it allows you to chain
+ multiple TLS connections together. It's particularly useful if you need to
+ implement TLS within TLS.
+
+ The class supports most of the socket API operations.
+ """
+
+ @staticmethod
+ def _validate_ssl_context_for_tls_in_tls(ssl_context):
+ """
+ Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
+ for TLS in TLS.
+
+ The only requirement is that the ssl_context provides the 'wrap_bio'
+ method.
+ """
+
+ if not hasattr(ssl_context, "wrap_bio"):
+ if six.PY2:
+ raise ProxySchemeUnsupported(
+ "TLS in TLS requires SSLContext.wrap_bio() which isn't "
+ "supported on Python 2"
+ )
+ else:
+ raise ProxySchemeUnsupported(
+ "TLS in TLS requires SSLContext.wrap_bio() which isn't "
+ "available on non-native SSLContext"
+ )
+
+ def __init__(
+ self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True
+ ):
+ """
+ Create an SSLTransport around socket using the provided ssl_context.
+ """
+ self.incoming = ssl.MemoryBIO()
+ self.outgoing = ssl.MemoryBIO()
+
+ self.suppress_ragged_eofs = suppress_ragged_eofs
+ self.socket = socket
+
+ self.sslobj = ssl_context.wrap_bio(
+ self.incoming, self.outgoing, server_hostname=server_hostname
+ )
+
+ # Perform initial handshake.
+ self._ssl_io_loop(self.sslobj.do_handshake)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *_):
+ self.close()
+
+ def fileno(self):
+ return self.socket.fileno()
+
+ def read(self, len=1024, buffer=None):
+ return self._wrap_ssl_read(len, buffer)
+
+ def recv(self, len=1024, flags=0):
+ if flags != 0:
+ raise ValueError("non-zero flags not allowed in calls to recv")
+ return self._wrap_ssl_read(len)
+
+ def recv_into(self, buffer, nbytes=None, flags=0):
+ if flags != 0:
+ raise ValueError("non-zero flags not allowed in calls to recv_into")
+ if buffer and (nbytes is None):
+ nbytes = len(buffer)
+ elif nbytes is None:
+ nbytes = 1024
+ return self.read(nbytes, buffer)
+
+ def sendall(self, data, flags=0):
+ if flags != 0:
+ raise ValueError("non-zero flags not allowed in calls to sendall")
+ count = 0
+ with memoryview(data) as view, view.cast("B") as byte_view:
+ amount = len(byte_view)
+ while count < amount:
+ v = self.send(byte_view[count:])
+ count += v
+
+ def send(self, data, flags=0):
+ if flags != 0:
+ raise ValueError("non-zero flags not allowed in calls to send")
+ response = self._ssl_io_loop(self.sslobj.write, data)
+ return response
+
+ def makefile(
+ self, mode="r", buffering=None, encoding=None, errors=None, newline=None
+ ):
+ """
+ Python's http.client uses makefile and buffered io when reading HTTP
+ messages and we need to support it.
+
+ This is unfortunately a copy and paste of socket.py makefile with small
+ changes to point to the socket directly.
+ """
+ if not set(mode) <= {"r", "w", "b"}:
+ raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
+
+ writing = "w" in mode
+ reading = "r" in mode or not writing
+ assert reading or writing
+ binary = "b" in mode
+ rawmode = ""
+ if reading:
+ rawmode += "r"
+ if writing:
+ rawmode += "w"
+ raw = socket.SocketIO(self, rawmode)
+ self.socket._io_refs += 1
+ if buffering is None:
+ buffering = -1
+ if buffering < 0:
+ buffering = io.DEFAULT_BUFFER_SIZE
+ if buffering == 0:
+ if not binary:
+ raise ValueError("unbuffered streams must be binary")
+ return raw
+ if reading and writing:
+ buffer = io.BufferedRWPair(raw, raw, buffering)
+ elif reading:
+ buffer = io.BufferedReader(raw, buffering)
+ else:
+ assert writing
+ buffer = io.BufferedWriter(raw, buffering)
+ if binary:
+ return buffer
+ text = io.TextIOWrapper(buffer, encoding, errors, newline)
+ text.mode = mode
+ return text
+
+ def unwrap(self):
+ self._ssl_io_loop(self.sslobj.unwrap)
+
+ def close(self):
+ self.socket.close()
+
+ def getpeercert(self, binary_form=False):
+ return self.sslobj.getpeercert(binary_form)
+
+ def version(self):
+ return self.sslobj.version()
+
+ def cipher(self):
+ return self.sslobj.cipher()
+
+ def selected_alpn_protocol(self):
+ return self.sslobj.selected_alpn_protocol()
+
+ def selected_npn_protocol(self):
+ return self.sslobj.selected_npn_protocol()
+
+ def shared_ciphers(self):
+ return self.sslobj.shared_ciphers()
+
+ def compression(self):
+ return self.sslobj.compression()
+
+ def settimeout(self, value):
+ self.socket.settimeout(value)
+
+ def gettimeout(self):
+ return self.socket.gettimeout()
+
+ def _decref_socketios(self):
+ self.socket._decref_socketios()
+
+ def _wrap_ssl_read(self, len, buffer=None):
+ try:
+ return self._ssl_io_loop(self.sslobj.read, len, buffer)
+ except ssl.SSLError as e:
+ if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs:
+ return 0 # eof, return 0.
+ else:
+ raise
+
+ def _ssl_io_loop(self, func, *args):
+ """Performs an I/O loop between incoming/outgoing and the socket."""
+ should_loop = True
+ ret = None
+
+ while should_loop:
+ errno = None
+ try:
+ ret = func(*args)
+ except ssl.SSLError as e:
+ if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
+ # WANT_READ, and WANT_WRITE are expected, others are not.
+ raise e
+ errno = e.errno
+
+ buf = self.outgoing.read()
+ self.socket.sendall(buf)
+
+ if errno is None:
+ should_loop = False
+ elif errno == ssl.SSL_ERROR_WANT_READ:
+ buf = self.socket.recv(SSL_BLOCKSIZE)
+ if buf:
+ self.incoming.write(buf)
+ else:
+ self.incoming.write_eof()
+ return ret
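A hedged sketch of `SSLTransport` in use: its real purpose is TLS-in-TLS through an HTTPS proxy, but wrapping an ordinary connected socket shows the same API. `example.com` is a placeholder host.

```python
# Illustrative only: wrap an existing TCP socket with SSLTransport.
# For genuine TLS-in-TLS the inner socket would itself already be a
# TLS connection established through a proxy.
import socket
import ssl
from urllib3.util.ssltransport import SSLTransport

ctx = ssl.create_default_context()
raw = socket.create_connection(("example.com", 443))
with SSLTransport(raw, ctx, server_hostname="example.com") as tls:
    tls.sendall(b"HEAD / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n")
    print(tls.recv(1024).decode("latin-1").splitlines()[0])  # status line
```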
diff --git a/openpype/hosts/fusion/vendor/urllib3/util/timeout.py b/openpype/hosts/fusion/vendor/urllib3/util/timeout.py
new file mode 100644
index 0000000000..ff69593b05
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/util/timeout.py
@@ -0,0 +1,268 @@
+from __future__ import absolute_import
+
+import time
+
+# The default socket timeout, used by httplib to indicate that no timeout was
+# specified by the user
+from socket import _GLOBAL_DEFAULT_TIMEOUT
+
+from ..exceptions import TimeoutStateError
+
+# A sentinel value to indicate that no timeout was specified by the user in
+# urllib3
+_Default = object()
+
+
+# Use time.monotonic if available.
+current_time = getattr(time, "monotonic", time.time)
+
+
+class Timeout(object):
+ """Timeout configuration.
+
+ Timeouts can be defined as a default for a pool:
+
+ .. code-block:: python
+
+ timeout = Timeout(connect=2.0, read=7.0)
+ http = PoolManager(timeout=timeout)
+ response = http.request('GET', 'http://example.com/')
+
+ Or per-request (which overrides the default for the pool):
+
+ .. code-block:: python
+
+ response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
+
+ Timeouts can be disabled by setting all the parameters to ``None``:
+
+ .. code-block:: python
+
+ no_timeout = Timeout(connect=None, read=None)
+ response = http.request('GET', 'http://example.com/', timeout=no_timeout)
+
+
+ :param total:
+ This combines the connect and read timeouts into one; the read timeout
+ will be set to the time leftover from the connect attempt. In the
+ event that both a connect timeout and a total are specified, or a read
+ timeout and a total are specified, the shorter timeout will be applied.
+
+ Defaults to None.
+
+ :type total: int, float, or None
+
+ :param connect:
+ The maximum amount of time (in seconds) to wait for a connection
+ attempt to a server to succeed. Omitting the parameter will default the
+ connect timeout to the system default, probably `the global default
+ timeout in socket.py
+ <https://github.com/python/cpython/blob/master/Lib/socket.py#L535>`_.
+ None will set an infinite timeout for connection attempts.
+
+ :type connect: int, float, or None
+
+ :param read:
+ The maximum amount of time (in seconds) to wait between consecutive
+ read operations for a response from the server. Omitting the parameter
+ will default the read timeout to the system default, probably `the
+ global default timeout in socket.py
+ <https://github.com/python/cpython/blob/master/Lib/socket.py#L535>`_.
+ None will set an infinite timeout.
+
+ :type read: int, float, or None
+
+ .. note::
+
+ Many factors can affect the total amount of time for urllib3 to return
+ an HTTP response.
+
+ For example, Python's DNS resolver does not obey the timeout specified
+ on the socket. Other factors that can affect total request time include
+ high CPU load, high swap, the program running at a low priority level,
+ or other behaviors.
+
+ In addition, the read and total timeouts only measure the time between
+ read operations on the socket connecting the client and the server,
+ not the total amount of time for the request to return a complete
+ response. For most requests, the timeout is raised because the server
+ has not sent the first byte in the specified time. This is not always
+ the case; if a server streams one byte every fifteen seconds, a timeout
+ of 20 seconds will not trigger, even though the request will take
+ several minutes to complete.
+
+ If your goal is to cut off any request after a set amount of wall clock
+ time, consider having a second "watcher" thread to cut off a slow
+ request.
+ """
+
+ #: A sentinel object representing the default timeout value
+ DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
+
+ def __init__(self, total=None, connect=_Default, read=_Default):
+ self._connect = self._validate_timeout(connect, "connect")
+ self._read = self._validate_timeout(read, "read")
+ self.total = self._validate_timeout(total, "total")
+ self._start_connect = None
+
+ def __repr__(self):
+ return "%s(connect=%r, read=%r, total=%r)" % (
+ type(self).__name__,
+ self._connect,
+ self._read,
+ self.total,
+ )
+
+ # __str__ provided for backwards compatibility
+ __str__ = __repr__
+
+ @classmethod
+ def _validate_timeout(cls, value, name):
+ """Check that a timeout attribute is valid.
+
+ :param value: The timeout value to validate
+ :param name: The name of the timeout attribute to validate. This is
+ used in error messages.
+ :return: The validated and cast version of the given value.
+ :raises ValueError: If it is a numeric value less than or equal to
+ zero, or the type is not an integer, float, or None.
+ """
+ if value is _Default:
+ return cls.DEFAULT_TIMEOUT
+
+ if value is None or value is cls.DEFAULT_TIMEOUT:
+ return value
+
+ if isinstance(value, bool):
+ raise ValueError(
+ "Timeout cannot be a boolean value. It must "
+ "be an int, float or None."
+ )
+ try:
+ float(value)
+ except (TypeError, ValueError):
+ raise ValueError(
+ "Timeout value %s was %s, but it must be an "
+ "int, float or None." % (name, value)
+ )
+
+ try:
+ if value <= 0:
+ raise ValueError(
+ "Attempted to set %s timeout to %s, but the "
+ "timeout cannot be set to a value less "
+ "than or equal to 0." % (name, value)
+ )
+ except TypeError:
+ # Python 3
+ raise ValueError(
+ "Timeout value %s was %s, but it must be an "
+ "int, float or None." % (name, value)
+ )
+
+ return value
+
+ @classmethod
+ def from_float(cls, timeout):
+ """Create a new Timeout from a legacy timeout value.
+
+ The timeout value used by httplib.py sets the same timeout on the
+ connect() and recv() socket requests. This creates a :class:`Timeout`
+ object that sets the individual timeouts to the ``timeout`` value
+ passed to this function.
+
+ :param timeout: The legacy timeout value.
+ :type timeout: integer, float, sentinel default object, or None
+ :return: Timeout object
+ :rtype: :class:`Timeout`
+ """
+ return Timeout(read=timeout, connect=timeout)
+
+ def clone(self):
+ """Create a copy of the timeout object
+
+ Timeout properties are stored per-pool but each request needs a fresh
+ Timeout object to ensure each one has its own start/stop configured.
+
+ :return: a copy of the timeout object
+ :rtype: :class:`Timeout`
+ """
+ # We can't use copy.deepcopy because that will also create a new object
+ # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
+ # detect the user default.
+ return Timeout(connect=self._connect, read=self._read, total=self.total)
+
+ def start_connect(self):
+ """Start the timeout clock, used during a connect() attempt
+
+ :raises urllib3.exceptions.TimeoutStateError: if you attempt
+ to start a timer that has been started already.
+ """
+ if self._start_connect is not None:
+ raise TimeoutStateError("Timeout timer has already been started.")
+ self._start_connect = current_time()
+ return self._start_connect
+
+ def get_connect_duration(self):
+ """Gets the time elapsed since the call to :meth:`start_connect`.
+
+ :return: Elapsed time in seconds.
+ :rtype: float
+ :raises urllib3.exceptions.TimeoutStateError: if you attempt
+ to get duration for a timer that hasn't been started.
+ """
+ if self._start_connect is None:
+ raise TimeoutStateError(
+ "Can't get connect duration for timer that has not started."
+ )
+ return current_time() - self._start_connect
+
+ @property
+ def connect_timeout(self):
+ """Get the value to use when setting a connection timeout.
+
+ This will be a positive float or integer, the value None
+ (never timeout), or the default system timeout.
+
+ :return: Connect timeout.
+ :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+ """
+ if self.total is None:
+ return self._connect
+
+ if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
+ return self.total
+
+ return min(self._connect, self.total)
+
+ @property
+ def read_timeout(self):
+ """Get the value for the read timeout.
+
+ This assumes some time has elapsed in the connection timeout and
+ computes the read timeout appropriately.
+
+ If self.total is set, the read timeout is dependent on the amount of
+ time taken by the connect timeout. If the connection time has not been
+ established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
+ raised.
+
+ :return: Value to use for the read timeout.
+ :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+ :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
+ has not yet been called on this object.
+ """
+ if (
+ self.total is not None
+ and self.total is not self.DEFAULT_TIMEOUT
+ and self._read is not None
+ and self._read is not self.DEFAULT_TIMEOUT
+ ):
+ # In case the connect timeout has not yet been established.
+ if self._start_connect is None:
+ return self._read
+ return max(0, min(self.total - self.get_connect_duration(), self._read))
+ elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
+ return max(0, self.total - self.get_connect_duration())
+ else:
+ return self._read
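To make the connect/read/total interplay concrete, a small usage sketch (placeholder URL, arbitrary values): with `total=10`, the effective read timeout shrinks by however long the connect phase took.

```python
# Sketch: a pool-wide default plus a per-request override, matching the
# class docstring above.
import urllib3
from urllib3.util.timeout import Timeout

http = urllib3.PoolManager(timeout=Timeout(connect=2.0, read=7.0))
resp = http.request("GET", "http://example.com/", timeout=Timeout(total=10.0))
print(resp.status)
```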
diff --git a/openpype/hosts/fusion/vendor/urllib3/util/url.py b/openpype/hosts/fusion/vendor/urllib3/util/url.py
new file mode 100644
index 0000000000..81a03da9e3
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/util/url.py
@@ -0,0 +1,432 @@
+from __future__ import absolute_import
+
+import re
+from collections import namedtuple
+
+from ..exceptions import LocationParseError
+from ..packages import six
+
+url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"]
+
+# We only want to normalize urls with an HTTP(S) scheme.
+# urllib3 infers URLs without a scheme (None) to be http.
+NORMALIZABLE_SCHEMES = ("http", "https", None)
+
+# Almost all of these patterns were derived from the
+# 'rfc3986' module: https://github.com/python-hyper/rfc3986
+PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}")
+SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)")
+URI_RE = re.compile(
+ r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?"
+ r"(?://([^\\/?#]*))?"
+ r"([^?#]*)"
+ r"(?:\?([^#]*))?"
+ r"(?:#(.*))?$",
+ re.UNICODE | re.DOTALL,
+)
+
+IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}"
+HEX_PAT = "[0-9A-Fa-f]{1,4}"
+LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT)
+_subs = {"hex": HEX_PAT, "ls32": LS32_PAT}
+_variations = [
+ # 6( h16 ":" ) ls32
+ "(?:%(hex)s:){6}%(ls32)s",
+ # "::" 5( h16 ":" ) ls32
+ "::(?:%(hex)s:){5}%(ls32)s",
+ # [ h16 ] "::" 4( h16 ":" ) ls32
+ "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s",
+ # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
+ "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s",
+ # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
+ "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s",
+ # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
+ "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s",
+ # [ *4( h16 ":" ) h16 ] "::" ls32
+ "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s",
+ # [ *5( h16 ":" ) h16 ] "::" h16
+ "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s",
+ # [ *6( h16 ":" ) h16 ] "::"
+ "(?:(?:%(hex)s:){0,6}%(hex)s)?::",
+]
+
+UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
+IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
+ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
+IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
+REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*"
+TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$")
+
+IPV4_RE = re.compile("^" + IPV4_PAT + "$")
+IPV6_RE = re.compile("^" + IPV6_PAT + "$")
+IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$")
+BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
+ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")
+
+_HOST_PORT_PAT = ("^(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
+ REG_NAME_PAT,
+ IPV4_PAT,
+ IPV6_ADDRZ_PAT,
+)
+_HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL)
+
+UNRESERVED_CHARS = set(
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~"
+)
+SUB_DELIM_CHARS = set("!$&'()*+,;=")
+USERINFO_CHARS = UNRESERVED_CHARS | SUB_DELIM_CHARS | {":"}
+PATH_CHARS = USERINFO_CHARS | {"@", "/"}
+QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {"?"}
+
+
+class Url(namedtuple("Url", url_attrs)):
+ """
+ Data structure for representing an HTTP URL. Used as a return value for
+ :func:`parse_url`. Both the scheme and host are normalized as they are
+ both case-insensitive according to RFC 3986.
+ """
+
+ __slots__ = ()
+
+ def __new__(
+ cls,
+ scheme=None,
+ auth=None,
+ host=None,
+ port=None,
+ path=None,
+ query=None,
+ fragment=None,
+ ):
+ if path and not path.startswith("/"):
+ path = "/" + path
+ if scheme is not None:
+ scheme = scheme.lower()
+ return super(Url, cls).__new__(
+ cls, scheme, auth, host, port, path, query, fragment
+ )
+
+ @property
+ def hostname(self):
+ """For backwards-compatibility with urlparse. We're nice like that."""
+ return self.host
+
+ @property
+ def request_uri(self):
+ """Absolute path including the query string."""
+ uri = self.path or "/"
+
+ if self.query is not None:
+ uri += "?" + self.query
+
+ return uri
+
+ @property
+ def netloc(self):
+ """Network location including host and port"""
+ if self.port:
+ return "%s:%d" % (self.host, self.port)
+ return self.host
+
+ @property
+ def url(self):
+ """
+ Convert self into a url
+
+ This function should more or less round-trip with :func:`.parse_url`. The
+ returned url may not be exactly the same as the url passed to
+ :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
+ with a blank port will have : removed).
+
+ Example: ::
+
+ >>> U = parse_url('http://google.com/mail/')
+ >>> U.url
+ 'http://google.com/mail/'
+ >>> Url('http', 'username:password', 'host.com', 80,
+ ... '/path', 'query', 'fragment').url
+ 'http://username:password@host.com:80/path?query#fragment'
+ """
+ scheme, auth, host, port, path, query, fragment = self
+ url = u""
+
+ # We use "is not None" we want things to happen with empty strings (or 0 port)
+ if scheme is not None:
+ url += scheme + u"://"
+ if auth is not None:
+ url += auth + u"@"
+ if host is not None:
+ url += host
+ if port is not None:
+ url += u":" + str(port)
+ if path is not None:
+ url += path
+ if query is not None:
+ url += u"?" + query
+ if fragment is not None:
+ url += u"#" + fragment
+
+ return url
+
+ def __str__(self):
+ return self.url
+
+
+def split_first(s, delims):
+ """
+ .. deprecated:: 1.25
+
+ Given a string and an iterable of delimiters, split on the first found
+ delimiter. Return two split parts and the matched delimiter.
+
+ If not found, then the first part is the full input string.
+
+ Example::
+
+ >>> split_first('foo/bar?baz', '?/=')
+ ('foo', 'bar?baz', '/')
+ >>> split_first('foo/bar?baz', '123')
+ ('foo/bar?baz', '', None)
+
+ Scales linearly with the number of delims; not ideal for a large number of delims.
+ """
+ min_idx = None
+ min_delim = None
+ for d in delims:
+ idx = s.find(d)
+ if idx < 0:
+ continue
+
+ if min_idx is None or idx < min_idx:
+ min_idx = idx
+ min_delim = d
+
+ if min_idx is None or min_idx < 0:
+ return s, "", None
+
+ return s[:min_idx], s[min_idx + 1 :], min_delim
+
+
+def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"):
+ """Percent-encodes a URI component without reapplying
+ onto an already percent-encoded component.
+ """
+ if component is None:
+ return component
+
+ component = six.ensure_text(component)
+
+ # Normalize existing percent-encoded bytes.
+ # Try to see if the component we're encoding is already percent-encoded
+ # so we can skip all '%' characters but still encode all others.
+ component, percent_encodings = PERCENT_RE.subn(
+ lambda match: match.group(0).upper(), component
+ )
+
+ uri_bytes = component.encode("utf-8", "surrogatepass")
+ is_percent_encoded = percent_encodings == uri_bytes.count(b"%")
+ encoded_component = bytearray()
+
+ for i in range(0, len(uri_bytes)):
+ # Will return a single character bytestring on both Python 2 & 3
+ byte = uri_bytes[i : i + 1]
+ byte_ord = ord(byte)
+ if (is_percent_encoded and byte == b"%") or (
+ byte_ord < 128 and byte.decode() in allowed_chars
+ ):
+ encoded_component += byte
+ continue
+ encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper()))
+
+ return encoded_component.decode(encoding)
+
+
+def _remove_path_dot_segments(path):
+ # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code
+ segments = path.split("/") # Turn the path into a list of segments
+ output = [] # Initialize the variable to use to store output
+
+ for segment in segments:
+ # '.' is the current directory, so ignore it, it is superfluous
+ if segment == ".":
+ continue
+ # Anything other than '..', should be appended to the output
+ elif segment != "..":
+ output.append(segment)
+ # In this case segment == '..', if we can, we should pop the last
+ # element
+ elif output:
+ output.pop()
+
+ # If the path starts with '/' and the output is empty or the first string
+ # is non-empty
+ if path.startswith("/") and (not output or output[0]):
+ output.insert(0, "")
+
+ # If the path starts with '/.' or '/..' ensure we add one more empty
+ # string to add a trailing '/'
+ if path.endswith(("/.", "/..")):
+ output.append("")
+
+ return "/".join(output)
+
+
+def _normalize_host(host, scheme):
+ if host:
+ if isinstance(host, six.binary_type):
+ host = six.ensure_str(host)
+
+ if scheme in NORMALIZABLE_SCHEMES:
+ is_ipv6 = IPV6_ADDRZ_RE.match(host)
+ if is_ipv6:
+ match = ZONE_ID_RE.search(host)
+ if match:
+ start, end = match.span(1)
+ zone_id = host[start:end]
+
+ if zone_id.startswith("%25") and zone_id != "%25":
+ zone_id = zone_id[3:]
+ else:
+ zone_id = zone_id[1:]
+ zone_id = "%" + _encode_invalid_chars(zone_id, UNRESERVED_CHARS)
+ return host[:start].lower() + zone_id + host[end:]
+ else:
+ return host.lower()
+ elif not IPV4_RE.match(host):
+ return six.ensure_str(
+ b".".join([_idna_encode(label) for label in host.split(".")])
+ )
+ return host
+
+
+def _idna_encode(name):
+ if name and any([ord(x) > 128 for x in name]):
+ try:
+ import idna
+ except ImportError:
+ six.raise_from(
+ LocationParseError("Unable to parse URL without the 'idna' module"),
+ None,
+ )
+ try:
+ return idna.encode(name.lower(), strict=True, std3_rules=True)
+ except idna.IDNAError:
+ six.raise_from(
+ LocationParseError(u"Name '%s' is not a valid IDNA label" % name), None
+ )
+ return name.lower().encode("ascii")
+
+
+def _encode_target(target):
+ """Percent-encodes a request target so that there are no invalid characters"""
+ path, query = TARGET_RE.match(target).groups()
+ target = _encode_invalid_chars(path, PATH_CHARS)
+ query = _encode_invalid_chars(query, QUERY_CHARS)
+ if query is not None:
+ target += "?" + query
+ return target
+
+
+def parse_url(url):
+ """
+ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
+ performed to parse incomplete urls. Fields not provided will be None.
+ This parser is RFC 3986 compliant.
+
+ The parser logic and helper functions are based heavily on
+ work done in the ``rfc3986`` module.
+
+ :param str url: URL to parse into a :class:`.Url` namedtuple.
+
+ Partly backwards-compatible with :mod:`urlparse`.
+
+ Example::
+
+ >>> parse_url('http://google.com/mail/')
+ Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
+ >>> parse_url('google.com:80')
+ Url(scheme=None, host='google.com', port=80, path=None, ...)
+ >>> parse_url('/foo?bar')
+ Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
+ """
+ if not url:
+ # Empty
+ return Url()
+
+ source_url = url
+ if not SCHEME_RE.search(url):
+ url = "//" + url
+
+ try:
+ scheme, authority, path, query, fragment = URI_RE.match(url).groups()
+ normalize_uri = scheme is None or scheme.lower() in NORMALIZABLE_SCHEMES
+
+ if scheme:
+ scheme = scheme.lower()
+
+ if authority:
+ auth, _, host_port = authority.rpartition("@")
+ auth = auth or None
+ host, port = _HOST_PORT_RE.match(host_port).groups()
+ if auth and normalize_uri:
+ auth = _encode_invalid_chars(auth, USERINFO_CHARS)
+ if port == "":
+ port = None
+ else:
+ auth, host, port = None, None, None
+
+ if port is not None:
+ port = int(port)
+ if not (0 <= port <= 65535):
+ raise LocationParseError(url)
+
+ host = _normalize_host(host, scheme)
+
+ if normalize_uri and path:
+ path = _remove_path_dot_segments(path)
+ path = _encode_invalid_chars(path, PATH_CHARS)
+ if normalize_uri and query:
+ query = _encode_invalid_chars(query, QUERY_CHARS)
+ if normalize_uri and fragment:
+ fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS)
+
+ except (ValueError, AttributeError):
+ return six.raise_from(LocationParseError(source_url), None)
+
+ # For the sake of backwards compatibility we put empty
+ # string values for path if there are any defined values
+ # beyond the path in the URL.
+ # TODO: Remove this when we break backwards compatibility.
+ if not path:
+ if query is not None or fragment is not None:
+ path = ""
+ else:
+ path = None
+
+ # Ensure that each part of the URL is a `str` for
+ # backwards compatibility.
+ if isinstance(url, six.text_type):
+ ensure_func = six.ensure_text
+ else:
+ ensure_func = six.ensure_str
+
+ def ensure_type(x):
+ return x if x is None else ensure_func(x)
+
+ return Url(
+ scheme=ensure_type(scheme),
+ auth=ensure_type(auth),
+ host=ensure_type(host),
+ port=port,
+ path=ensure_type(path),
+ query=ensure_type(query),
+ fragment=ensure_type(fragment),
+ )
+
+
+def get_host(url):
+ """
+ Deprecated. Use :func:`parse_url` instead.
+ """
+ p = parse_url(url)
+ return p.scheme or "http", p.hostname, p.port
diff --git a/openpype/hosts/fusion/vendor/urllib3/util/wait.py b/openpype/hosts/fusion/vendor/urllib3/util/wait.py
new file mode 100644
index 0000000000..c280646c7b
--- /dev/null
+++ b/openpype/hosts/fusion/vendor/urllib3/util/wait.py
@@ -0,0 +1,153 @@
+import errno
+import select
+import sys
+from functools import partial
+
+try:
+ from time import monotonic
+except ImportError:
+ from time import time as monotonic
+
+__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"]
+
+
+class NoWayToWaitForSocketError(Exception):
+ pass
+
+
+# How should we wait on sockets?
+#
+# There are two types of APIs you can use for waiting on sockets: the fancy
+# modern stateful APIs like epoll/kqueue, and the older stateless APIs like
+ # select/poll. The stateful APIs are more efficient when you have a lot of
+# sockets to keep track of, because you can set them up once and then use them
+# lots of times. But we only ever want to wait on a single socket at a time
+# and don't want to keep track of state, so the stateless APIs are actually
+# more efficient. So we want to use select() or poll().
+#
+# Now, how do we choose between select() and poll()? On traditional Unixes,
+# select() has a strange calling convention that makes it slow, or fail
+# altogether, for high-numbered file descriptors. The point of poll() is to fix
+# that, so on Unixes, we prefer poll().
+#
+# On Windows, there is no poll() (or at least Python doesn't provide a wrapper
+# for it), but that's OK, because on Windows, select() doesn't have this
+# strange calling convention; plain select() works fine.
+#
+# So: on Windows we use select(), and everywhere else we use poll(). We also
+# fall back to select() in case poll() is somehow broken or missing.
+
+if sys.version_info >= (3, 5):
+ # Modern Python, that retries syscalls by default
+ def _retry_on_intr(fn, timeout):
+ return fn(timeout)
+
+
+else:
+ # Old and broken Pythons.
+ def _retry_on_intr(fn, timeout):
+ if timeout is None:
+ deadline = float("inf")
+ else:
+ deadline = monotonic() + timeout
+
+ while True:
+ try:
+ return fn(timeout)
+ # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7
+ except (OSError, select.error) as e:
+ # 'e.args[0]' incantation works for both OSError and select.error
+ if e.args[0] != errno.EINTR:
+ raise
+ else:
+ timeout = deadline - monotonic()
+ if timeout < 0:
+ timeout = 0
+ if timeout == float("inf"):
+ timeout = None
+ continue
+
+
+def select_wait_for_socket(sock, read=False, write=False, timeout=None):
+ if not read and not write:
+ raise RuntimeError("must specify at least one of read=True, write=True")
+ rcheck = []
+ wcheck = []
+ if read:
+ rcheck.append(sock)
+ if write:
+ wcheck.append(sock)
+ # When doing a non-blocking connect, most systems signal success by
+ # marking the socket writable. Windows, though, signals success by marking
+ # it as "exceptional". We paper over the difference by checking the write
+ # sockets for both conditions. (The stdlib selectors module does the same
+ # thing.)
+ fn = partial(select.select, rcheck, wcheck, wcheck)
+ rready, wready, xready = _retry_on_intr(fn, timeout)
+ return bool(rready or wready or xready)
+
+
+def poll_wait_for_socket(sock, read=False, write=False, timeout=None):
+ if not read and not write:
+ raise RuntimeError("must specify at least one of read=True, write=True")
+ mask = 0
+ if read:
+ mask |= select.POLLIN
+ if write:
+ mask |= select.POLLOUT
+ poll_obj = select.poll()
+ poll_obj.register(sock, mask)
+
+ # For some reason, poll() takes timeout in milliseconds
+ def do_poll(t):
+ if t is not None:
+ t *= 1000
+ return poll_obj.poll(t)
+
+ return bool(_retry_on_intr(do_poll, timeout))
+
+
+def null_wait_for_socket(*args, **kwargs):
+ raise NoWayToWaitForSocketError("no select-equivalent available")
+
+
+def _have_working_poll():
+ # Apparently some systems have a select.poll that fails as soon as you try
+ # to use it, either due to strange configuration or broken monkeypatching
+ # from libraries like eventlet/greenlet.
+ try:
+ poll_obj = select.poll()
+ _retry_on_intr(poll_obj.poll, 0)
+ except (AttributeError, OSError):
+ return False
+ else:
+ return True
+
+
+def wait_for_socket(*args, **kwargs):
+ # We delay choosing which implementation to use until the first time we're
+ # called. We could do it at import time, but then we might make the wrong
+ # decision if someone goes wild with monkeypatching select.poll after
+ # we're imported.
+ global wait_for_socket
+ if _have_working_poll():
+ wait_for_socket = poll_wait_for_socket
+ elif hasattr(select, "select"):
+ wait_for_socket = select_wait_for_socket
+ else: # Platform-specific: Appengine.
+ wait_for_socket = null_wait_for_socket
+ return wait_for_socket(*args, **kwargs)
+
+
+def wait_for_read(sock, timeout=None):
+ """Waits for reading to be available on a given socket.
+ Returns True if the socket is readable, or False if the timeout expired.
+ """
+ return wait_for_socket(sock, read=True, timeout=timeout)
+
+
+def wait_for_write(sock, timeout=None):
+ """Waits for writing to be available on a given socket.
+ Returns True if the socket is writable, or False if the timeout expired.
+ """
+ return wait_for_socket(sock, write=True, timeout=timeout)
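The long comment above explains the poll-over-select choice; the public helpers are easiest to see with a local socket pair, which needs no network access at all.

```python
# Minimal sketch of wait_for_read using socket.socketpair().
import socket
from urllib3.util.wait import wait_for_read

a, b = socket.socketpair()
print(wait_for_read(a, timeout=0.1))  # False: nothing buffered yet
b.sendall(b"ping")
print(wait_for_read(a, timeout=0.1))  # True: data is waiting
a.close()
b.close()
```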
diff --git a/openpype/hosts/harmony/api/README.md b/openpype/hosts/harmony/api/README.md
index 12f21f551a..be3920fe29 100644
--- a/openpype/hosts/harmony/api/README.md
+++ b/openpype/hosts/harmony/api/README.md
@@ -610,7 +610,7 @@ class ImageSequenceLoader(load.LoaderPlugin):
def update(self, container, representation):
node = container.pop("node")
- project_name = legacy_io.active_project()
+ project_name = get_current_project_name()
version = get_version_by_id(project_name, representation["parent"])
files = []
for f in version["data"]["files"]:
diff --git a/openpype/hosts/harmony/plugins/load/load_background.py b/openpype/hosts/harmony/plugins/load/load_background.py
index c28a87791e..853d347c2e 100644
--- a/openpype/hosts/harmony/plugins/load/load_background.py
+++ b/openpype/hosts/harmony/plugins/load/load_background.py
@@ -238,7 +238,8 @@ class BackgroundLoader(load.LoaderPlugin):
def load(self, context, name=None, namespace=None, data=None):
- with open(self.fname) as json_file:
+ path = self.filepath_from_context(context)
+ with open(path) as json_file:
data = json.load(json_file)
layers = list()
@@ -251,7 +252,7 @@ class BackgroundLoader(load.LoaderPlugin):
if layer.get("filename"):
layers.append(layer["filename"])
- bg_folder = os.path.dirname(self.fname)
+ bg_folder = os.path.dirname(path)
subset_name = context["subset"]["name"]
# read_node_name += "_{}".format(uuid.uuid4())
diff --git a/openpype/hosts/harmony/plugins/load/load_imagesequence.py b/openpype/hosts/harmony/plugins/load/load_imagesequence.py
index b95d25f507..754f82e5d5 100644
--- a/openpype/hosts/harmony/plugins/load/load_imagesequence.py
+++ b/openpype/hosts/harmony/plugins/load/load_imagesequence.py
@@ -34,7 +34,7 @@ class ImageSequenceLoader(load.LoaderPlugin):
data (dict, optional): Additional data passed into loader.
"""
- fname = Path(self.fname)
+ fname = Path(self.filepath_from_context(context))
self_name = self.__class__.__name__
collections, remainder = clique.assemble(
os.listdir(fname.parent.as_posix())
diff --git a/openpype/hosts/harmony/plugins/load/load_template.py b/openpype/hosts/harmony/plugins/load/load_template.py
index f3c69a9104..a78a1bf1ec 100644
--- a/openpype/hosts/harmony/plugins/load/load_template.py
+++ b/openpype/hosts/harmony/plugins/load/load_template.py
@@ -82,7 +82,6 @@ class TemplateLoader(load.LoaderPlugin):
node = harmony.find_node_by_name(node_name, "GROUP")
self_name = self.__class__.__name__
- update_and_replace = False
if is_representation_from_latest(representation):
self._set_green(node)
else:
diff --git a/openpype/hosts/harmony/plugins/publish/collect_farm_render.py b/openpype/hosts/harmony/plugins/publish/collect_farm_render.py
index f6b26eb3e8..af825c052a 100644
--- a/openpype/hosts/harmony/plugins/publish/collect_farm_render.py
+++ b/openpype/hosts/harmony/plugins/publish/collect_farm_render.py
@@ -5,7 +5,6 @@ from pathlib import Path
import attr
from openpype.lib import get_formatted_current_time
-from openpype.pipeline import legacy_io
from openpype.pipeline import publish
from openpype.pipeline.publish import RenderInstance
import openpype.hosts.harmony.api as harmony
@@ -99,6 +98,8 @@ class CollectFarmRender(publish.AbstractCollectRender):
self_name = self.__class__.__name__
+ asset_name = context.data["asset"]
+
for node in context.data["allNodes"]:
data = harmony.read(node)
@@ -141,18 +142,18 @@ class CollectFarmRender(publish.AbstractCollectRender):
source=context.data["currentFile"],
label=node.split("/")[1],
subset=subset_name,
- asset=legacy_io.Session["AVALON_ASSET"],
+ asset=asset_name,
task=task_name,
attachTo=False,
setMembers=[node],
publish=info[4],
- review=False,
renderer=None,
priority=50,
name=node.split("/")[1],
family="render.farm",
families=["render.farm"],
+ farm=True,
resolutionWidth=context.data["resolutionWidth"],
resolutionHeight=context.data["resolutionHeight"],
@@ -173,7 +174,6 @@ class CollectFarmRender(publish.AbstractCollectRender):
outputFormat=info[1],
outputStartFrame=info[3],
leadingZeros=info[2],
- toBeRenderedOn='deadline',
ignoreFrameHandleCheck=True
)
diff --git a/openpype/hosts/harmony/plugins/publish/collect_palettes.py b/openpype/hosts/harmony/plugins/publish/collect_palettes.py
index bbd60d1c55..e19057e302 100644
--- a/openpype/hosts/harmony/plugins/publish/collect_palettes.py
+++ b/openpype/hosts/harmony/plugins/publish/collect_palettes.py
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
"""Collect palettes from Harmony."""
-import os
import json
import re
@@ -32,6 +31,7 @@ class CollectPalettes(pyblish.api.ContextPlugin):
if (not any([re.search(pattern, task_name)
for pattern in self.allowed_tasks])):
return
+ asset_name = context.data["asset"]
for name, id in palettes.items():
instance = context.create_instance(name)
@@ -39,7 +39,7 @@ class CollectPalettes(pyblish.api.ContextPlugin):
"id": id,
"family": "harmony.palette",
'families': [],
- "asset": os.environ["AVALON_ASSET"],
+ "asset": asset_name,
"subset": "{}{}".format("palette", name)
})
self.log.info(
diff --git a/openpype/hosts/harmony/plugins/publish/collect_workfile.py b/openpype/hosts/harmony/plugins/publish/collect_workfile.py
index 3624147435..4492ab37a5 100644
--- a/openpype/hosts/harmony/plugins/publish/collect_workfile.py
+++ b/openpype/hosts/harmony/plugins/publish/collect_workfile.py
@@ -36,5 +36,5 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
"family": family,
"families": [family],
"representations": [],
- "asset": os.environ["AVALON_ASSET"]
+ "asset": context.data["asset"]
})
diff --git a/openpype/hosts/harmony/plugins/publish/extract_render.py b/openpype/hosts/harmony/plugins/publish/extract_render.py
index 38b09902c1..5825d95a4a 100644
--- a/openpype/hosts/harmony/plugins/publish/extract_render.py
+++ b/openpype/hosts/harmony/plugins/publish/extract_render.py
@@ -94,15 +94,14 @@ class ExtractRender(pyblish.api.InstancePlugin):
# Generate thumbnail.
thumbnail_path = os.path.join(path, "thumbnail.png")
- ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")
- args = [
- ffmpeg_path,
+ args = openpype.lib.get_ffmpeg_tool_args(
+ "ffmpeg",
"-y",
"-i", os.path.join(path, list(collections[0])[0]),
"-vf", "scale=300:-1",
"-vframes", "1",
thumbnail_path
- ]
+ )
process = subprocess.Popen(
args,
stdout=subprocess.PIPE,
diff --git a/openpype/hosts/harmony/plugins/publish/extract_template.py b/openpype/hosts/harmony/plugins/publish/extract_template.py
index 458bf25a3c..e75459fe1e 100644
--- a/openpype/hosts/harmony/plugins/publish/extract_template.py
+++ b/openpype/hosts/harmony/plugins/publish/extract_template.py
@@ -75,7 +75,7 @@ class ExtractTemplate(publish.Extractor):
instance.data["representations"] = [representation]
instance.data["version_name"] = "{}_{}".format(
- instance.data["subset"], os.environ["AVALON_TASK"])
+ instance.data["subset"], instance.context.data["task"])
def get_backdrops(self, node: str) -> list:
"""Get backdrops for the node.
diff --git a/openpype/hosts/harmony/plugins/publish/validate_instances.py b/openpype/hosts/harmony/plugins/publish/validate_instances.py
index ac367082ef..7183de6048 100644
--- a/openpype/hosts/harmony/plugins/publish/validate_instances.py
+++ b/openpype/hosts/harmony/plugins/publish/validate_instances.py
@@ -1,8 +1,7 @@
-import os
-
import pyblish.api
import openpype.hosts.harmony.api as harmony
+from openpype.pipeline import get_current_asset_name
from openpype.pipeline.publish import (
ValidateContentsOrder,
PublishXmlValidationError,
@@ -30,7 +29,7 @@ class ValidateInstanceRepair(pyblish.api.Action):
for instance in instances:
data = harmony.read(instance.data["setMembers"][0])
- data["asset"] = os.environ["AVALON_ASSET"]
+ data["asset"] = get_current_asset_name()
harmony.imprint(instance.data["setMembers"][0], data)
@@ -44,7 +43,7 @@ class ValidateInstance(pyblish.api.InstancePlugin):
def process(self, instance):
instance_asset = instance.data["asset"]
- current_asset = os.environ["AVALON_ASSET"]
+ current_asset = get_current_asset_name()
msg = (
"Instance asset is not the same as current asset:"
f"\nInstance: {instance_asset}\nCurrent: {current_asset}"
diff --git a/openpype/hosts/harmony/plugins/publish/validate_scene_settings.py b/openpype/hosts/harmony/plugins/publish/validate_scene_settings.py
index 6e4c6955e4..866f12076a 100644
--- a/openpype/hosts/harmony/plugins/publish/validate_scene_settings.py
+++ b/openpype/hosts/harmony/plugins/publish/validate_scene_settings.py
@@ -67,7 +67,9 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
expected_settings["frameEndHandle"] = expected_settings["frameEnd"] +\
expected_settings["handleEnd"]
- if (any(re.search(pattern, os.getenv('AVALON_TASK'))
+ task_name = instance.context.data["task"]
+
+ if (any(re.search(pattern, task_name)
for pattern in self.skip_resolution_check)):
self.log.info("Skipping resolution check because of "
"task name and pattern {}".format(
diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py
index 09d73f5cc2..bf719160d1 100644
--- a/openpype/hosts/hiero/api/lib.py
+++ b/openpype/hosts/hiero/api/lib.py
@@ -22,9 +22,7 @@ except ImportError:
from openpype.client import get_project
from openpype.settings import get_project_settings
-from openpype.pipeline import (
- get_current_project_name, legacy_io, Anatomy
-)
+from openpype.pipeline import Anatomy, get_current_project_name
from openpype.pipeline.load import filter_containers
from openpype.lib import Logger
from . import tags
@@ -626,7 +624,7 @@ def get_publish_attribute(tag):
def sync_avalon_data_to_workfile():
# import session to get project dir
- project_name = legacy_io.Session["AVALON_PROJECT"]
+ project_name = get_current_project_name()
anatomy = Anatomy(project_name)
work_template = anatomy.templates["work"]["path"]
@@ -821,7 +819,7 @@ class PublishAction(QtWidgets.QAction):
# # create root node and save all metadata
# root_node = hiero.core.nuke.RootNode()
#
-# anatomy = Anatomy(os.environ["AVALON_PROJECT"])
+# anatomy = Anatomy(get_current_project_name())
# work_template = anatomy.templates["work"]["path"]
# root_path = anatomy.root_value_for_template(work_template)
#
@@ -1041,7 +1039,7 @@ def _set_hrox_project_knobs(doc, **knobs):
def apply_colorspace_project():
- project_name = os.getenv("AVALON_PROJECT")
+ project_name = get_current_project_name()
# get path to the active project
project = get_current_project(remove_untitled=True)
current_file = project.path()
@@ -1110,7 +1108,7 @@ def apply_colorspace_project():
def apply_colorspace_clips():
- project_name = os.getenv("AVALON_PROJECT")
+ project_name = get_current_project_name()
project = get_current_project(remove_untitled=True)
clips = project.clips()
@@ -1264,7 +1262,7 @@ def check_inventory_versions(track_items=None):
if not containers:
return
- project_name = legacy_io.active_project()
+ project_name = get_current_project_name()
filter_result = filter_containers(containers, project_name)
for container in filter_result.latest:
set_track_color(container["_item"], clip_color_last)
diff --git a/openpype/hosts/hiero/api/menu.py b/openpype/hosts/hiero/api/menu.py
index 6baeb38cc0..9967e9c875 100644
--- a/openpype/hosts/hiero/api/menu.py
+++ b/openpype/hosts/hiero/api/menu.py
@@ -4,12 +4,18 @@ import sys
import hiero.core
from hiero.ui import findMenuAction
+from qtpy import QtGui
+
from openpype.lib import Logger
-from openpype.pipeline import legacy_io
from openpype.tools.utils import host_tools
+from openpype.settings import get_project_settings
+from openpype.pipeline import (
+ get_current_project_name,
+ get_current_asset_name,
+ get_current_task_name
+)
from . import tags
-from openpype.settings import get_project_settings
log = Logger.get_logger(__name__)
@@ -17,6 +23,13 @@ self = sys.modules[__name__]
self._change_context_menu = None
+def get_context_label():
+ return "{}, {}".format(
+ get_current_asset_name(),
+ get_current_task_name()
+ )
+
+
def update_menu_task_label():
"""Update the task label in Avalon menu to current session"""
@@ -27,10 +40,7 @@ def update_menu_task_label():
log.warning("Can't find menuItem: {}".format(object_name))
return
- label = "{}, {}".format(
- legacy_io.Session["AVALON_ASSET"],
- legacy_io.Session["AVALON_TASK"]
- )
+ label = get_context_label()
menu = found_menu.menu()
self._change_context_menu = label
@@ -43,7 +53,6 @@ def menu_install():
"""
- from qtpy import QtGui
from . import (
publish, launch_workfiles_app, reload_config,
apply_colorspace_project, apply_colorspace_clips
@@ -56,10 +65,7 @@ def menu_install():
menu_name = os.environ['AVALON_LABEL']
- context_label = "{0}, {1}".format(
- legacy_io.Session["AVALON_ASSET"],
- legacy_io.Session["AVALON_TASK"]
- )
+ context_label = get_context_label()
self._change_context_menu = context_label
@@ -154,7 +160,7 @@ def add_scripts_menu():
return
# load configuration of custom menu
- project_settings = get_project_settings(os.getenv("AVALON_PROJECT"))
+ project_settings = get_project_settings(get_current_project_name())
config = project_settings["hiero"]["scriptsmenu"]["definition"]
_menu = project_settings["hiero"]["scriptsmenu"]["name"]
diff --git a/openpype/hosts/hiero/api/plugin.py b/openpype/hosts/hiero/api/plugin.py
index a3f8a6c524..52f96261b2 100644
--- a/openpype/hosts/hiero/api/plugin.py
+++ b/openpype/hosts/hiero/api/plugin.py
@@ -12,6 +12,7 @@ from openpype.settings import get_current_project_settings
from openpype.lib import Logger
from openpype.pipeline import LoaderPlugin, LegacyCreator
from openpype.pipeline.context_tools import get_current_project_asset
+from openpype.pipeline.load import get_representation_path_from_context
from . import lib
log = Logger.get_logger(__name__)
@@ -316,20 +317,6 @@ class Spacer(QtWidgets.QWidget):
self.setLayout(layout)
-def get_reference_node_parents(ref):
- """Return all parent reference nodes of reference node
-
- Args:
- ref (str): reference node.
-
- Returns:
- list: The upstream parent reference nodes.
-
- """
- parents = []
- return parents
-
-
class SequenceLoader(LoaderPlugin):
"""A basic SequenceLoader for Resolve
@@ -393,7 +380,7 @@ class ClipLoader:
active_bin = None
data = dict()
- def __init__(self, cls, context, **options):
+ def __init__(self, cls, context, path, **options):
""" Initialize object
Arguments:
@@ -406,6 +393,7 @@ class ClipLoader:
self.__dict__.update(cls.__dict__)
self.context = context
self.active_project = lib.get_current_project()
+ self.fname = path
# try to get value from options or evaluate key value for `handles`
self.with_handles = options.get("handles") or bool(
@@ -467,7 +455,7 @@ class ClipLoader:
self.data["track_name"] = "_".join([subset, representation])
self.data["versionData"] = self.context["version"]["data"]
# gets file path
- file = self.fname
+ file = get_representation_path_from_context(self.context)
if not file:
repr_id = repr["_id"]
log.warning(
diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py
index cb7bc14edb..02d8205414 100644
--- a/openpype/hosts/hiero/api/tags.py
+++ b/openpype/hosts/hiero/api/tags.py
@@ -5,7 +5,7 @@ import hiero
from openpype.client import get_project, get_assets
from openpype.lib import Logger
-from openpype.pipeline import legacy_io
+from openpype.pipeline import get_current_project_name
log = Logger.get_logger(__name__)
@@ -142,7 +142,7 @@ def add_tags_to_workfile():
nks_pres_tags = tag_data()
# Get project task types.
- project_name = legacy_io.active_project()
+ project_name = get_current_project_name()
project_doc = get_project(project_name)
tasks = project_doc["config"]["tasks"]
nks_pres_tags["[Tasks]"] = {}
diff --git a/openpype/hosts/hiero/plugins/load/load_clip.py b/openpype/hosts/hiero/plugins/load/load_clip.py
index c9bebfa8b2..05bd12d185 100644
--- a/openpype/hosts/hiero/plugins/load/load_clip.py
+++ b/openpype/hosts/hiero/plugins/load/load_clip.py
@@ -3,8 +3,8 @@ from openpype.client import (
get_last_version_by_subset_id
)
from openpype.pipeline import (
- legacy_io,
get_representation_path,
+ get_current_project_name,
)
from openpype.lib.transcoding import (
VIDEO_EXTENSIONS,
@@ -87,7 +87,8 @@ class LoadClip(phiero.SequenceLoader):
})
# load clip to timeline and get main variables
- track_item = phiero.ClipLoader(self, context, **options).load()
+ path = self.filepath_from_context(context)
+ track_item = phiero.ClipLoader(self, context, path, **options).load()
namespace = namespace or track_item.name()
version = context['version']
version_data = version.get("data", {})
@@ -147,7 +148,7 @@ class LoadClip(phiero.SequenceLoader):
track_item = phiero.get_track_items(
track_item_name=namespace).pop()
- project_name = legacy_io.active_project()
+ project_name = get_current_project_name()
version_doc = get_version_by_id(project_name, representation["parent"])
version_data = version_doc.get("data", {})
@@ -210,7 +211,7 @@ class LoadClip(phiero.SequenceLoader):
@classmethod
def set_item_color(cls, track_item, version_doc):
- project_name = legacy_io.active_project()
+ project_name = get_current_project_name()
last_version_doc = get_last_version_by_subset_id(
project_name, version_doc["parent"], fields=["_id"]
)
diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py
index b61cca9731..31147d013f 100644
--- a/openpype/hosts/hiero/plugins/load/load_effects.py
+++ b/openpype/hosts/hiero/plugins/load/load_effects.py
@@ -9,8 +9,8 @@ from openpype.client import (
from openpype.pipeline import (
AVALON_CONTAINER_ID,
load,
- legacy_io,
- get_representation_path
+ get_representation_path,
+ get_current_project_name
)
from openpype.hosts.hiero import api as phiero
from openpype.lib import Logger
@@ -59,7 +59,8 @@ class LoadEffects(load.LoaderPlugin):
}
# getting file path
- file = self.fname.replace("\\", "/")
+ file = self.filepath_from_context(context)
+ file = file.replace("\\", "/")
if self._shared_loading(
file,
@@ -167,7 +168,7 @@ class LoadEffects(load.LoaderPlugin):
namespace = container['namespace']
# get timeline in out data
- project_name = legacy_io.active_project()
+ project_name = get_current_project_name()
version_doc = get_version_by_id(project_name, representation["parent"])
version_data = version_doc["data"]
clip_in = version_data["clipIn"]
diff --git a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py
index d455ad4a4e..fcb1ab27a0 100644
--- a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py
+++ b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py
@@ -43,7 +43,6 @@ class CollectClipEffects(pyblish.api.InstancePlugin):
if review and review_track_index == _track_index:
continue
for sitem in sub_track_items:
- effect = None
# make sure this subtrack item is relative of track item
if ((track_item not in sitem.linkedItems())
and (len(sitem.linkedItems()) > 0)):
@@ -53,7 +52,6 @@ class CollectClipEffects(pyblish.api.InstancePlugin):
continue
effect = self.add_effect(_track_index, sitem)
-
if effect:
effects.update(effect)
diff --git a/openpype/hosts/hiero/plugins/publish/extract_frames.py b/openpype/hosts/hiero/plugins/publish/extract_frames.py
index f865d2fb39..803c338766 100644
--- a/openpype/hosts/hiero/plugins/publish/extract_frames.py
+++ b/openpype/hosts/hiero/plugins/publish/extract_frames.py
@@ -2,7 +2,7 @@ import os
import pyblish.api
from openpype.lib import (
- get_oiio_tools_path,
+ get_oiio_tool_args,
run_subprocess,
)
from openpype.pipeline import publish
@@ -18,7 +18,7 @@ class ExtractFrames(publish.Extractor):
movie_extensions = ["mov", "mp4"]
def process(self, instance):
- oiio_tool_path = get_oiio_tools_path()
+ oiio_tool_args = get_oiio_tool_args("oiiotool")
staging_dir = self.staging_dir(instance)
output_template = os.path.join(staging_dir, instance.data["name"])
sequence = instance.context.data["activeTimeline"]
@@ -36,7 +36,7 @@ class ExtractFrames(publish.Extractor):
output_path = output_template
output_path += ".{:04d}.{}".format(int(frame), output_ext)
- args = [oiio_tool_path]
+ args = list(oiio_tool_args)
ext = os.path.splitext(input_path)[1][1:]
if ext in self.movie_extensions:
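The oiiotool switch follows the same pattern; note the `list(oiio_tool_args)` copy, which keeps per-frame arguments from mutating the shared base invocation. A hedged sketch:

    from openpype.lib import get_oiio_tool_args

    base_args = get_oiio_tool_args("oiiotool")
    for frame in (1001, 1002):    # illustrative frame numbers
        args = list(base_args)    # fresh copy per frame
        args += ["in.%04d.exr" % frame, "-o", "out.%04d.png" % frame]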
diff --git a/openpype/hosts/hiero/plugins/publish/precollect_workfile.py b/openpype/hosts/hiero/plugins/publish/precollect_workfile.py
index 1f477c1639..5a66581531 100644
--- a/openpype/hosts/hiero/plugins/publish/precollect_workfile.py
+++ b/openpype/hosts/hiero/plugins/publish/precollect_workfile.py
@@ -7,7 +7,6 @@ from qtpy.QtGui import QPixmap
import hiero.ui
-from openpype.pipeline import legacy_io
from openpype.hosts.hiero.api.otio import hiero_export
@@ -19,7 +18,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
def process(self, context):
- asset = legacy_io.Session["AVALON_ASSET"]
+ asset = context.data["asset"]
subset = "workfile"
active_timeline = hiero.ui.activeSequence()
project = active_timeline.project()
diff --git a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py
index 5f96533052..767f7c30f7 100644
--- a/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py
+++ b/openpype/hosts/hiero/plugins/publish_old_workflow/collect_assetbuilds.py
@@ -1,6 +1,5 @@
from pyblish import api
from openpype.client import get_assets
-from openpype.pipeline import legacy_io
class CollectAssetBuilds(api.ContextPlugin):
@@ -18,7 +17,7 @@ class CollectAssetBuilds(api.ContextPlugin):
hosts = ["hiero"]
def process(self, context):
- project_name = legacy_io.active_project()
+ project_name = context.data["projectName"]
asset_builds = {}
for asset in get_assets(project_name):
if asset["data"]["entityType"] == "AssetBuild":
diff --git a/openpype/hosts/houdini/api/action.py b/openpype/hosts/houdini/api/action.py
index b1519ddd1d..77966d6d5c 100644
--- a/openpype/hosts/houdini/api/action.py
+++ b/openpype/hosts/houdini/api/action.py
@@ -42,3 +42,42 @@ class SelectInvalidAction(pyblish.api.Action):
node.setCurrent(True)
else:
self.log.info("No invalid nodes found.")
+
+
+class SelectROPAction(pyblish.api.Action):
+ """Select ROP.
+
+ It's used to select the associated ROPs with the errored instances.
+ """
+
+ label = "Select ROP"
+ on = "failed" # This action is only available on a failed plug-in
+ icon = "mdi.cursor-default-click"
+
+ def process(self, context, plugin):
+ errored_instances = get_errored_instances_from_context(context, plugin)
+
+ # Get the ROP nodes for the errored instances
+ self.log.info("Finding ROP nodes..")
+ rop_nodes = list()
+ for instance in errored_instances:
+ node_path = instance.data.get("instance_node")
+ if not node_path:
+ continue
+
+ node = hou.node(node_path)
+ if not node:
+ continue
+
+ rop_nodes.append(node)
+
+ hou.clearAllSelected()
+ if rop_nodes:
+ self.log.info("Selecting ROP nodes: {}".format(
+ ", ".join(node.path() for node in rop_nodes)
+ ))
+ for node in rop_nodes:
+ node.setSelected(True)
+ node.setCurrent(True)
+ else:
+ self.log.info("No ROP nodes found.")
diff --git a/openpype/hosts/houdini/api/colorspace.py b/openpype/hosts/houdini/api/colorspace.py
index 7047644225..cc40b9df1c 100644
--- a/openpype/hosts/houdini/api/colorspace.py
+++ b/openpype/hosts/houdini/api/colorspace.py
@@ -1,7 +1,7 @@
import attr
import hou
from openpype.hosts.houdini.api.lib import get_color_management_preferences
-
+from openpype.pipeline.colorspace import get_display_view_colorspace_name
@attr.s
class LayerMetadata(object):
@@ -54,3 +54,16 @@ class ARenderProduct(object):
)
]
return colorspace_data
+
+
+def get_default_display_view_colorspace():
+ """Returns the colorspace attribute of the default (display, view) pair.
+
+ It's used for the 'ociocolorspace' parm on the OpenGL node."""
+
+ prefs = get_color_management_preferences()
+ return get_display_view_colorspace_name(
+ config_path=prefs["config"],
+ display=prefs["display"],
+ view=prefs["view"]
+ )
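`get_default_display_view_colorspace` resolves the colorspace of the current default display/view pair from the scene's OCIO preferences; the review creator later feeds it into the OpenGL ROP's `ociocolorspace` parm. A hedged usage sketch:

    from openpype.hosts.houdini.api.colorspace import (
        get_default_display_view_colorspace,
    )

    # returned name depends on the active OCIO config,
    # e.g. "ACES - ACEScg" (illustrative)
    view_space = get_default_display_view_colorspace()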
diff --git a/openpype/hosts/houdini/api/creator_node_shelves.py b/openpype/hosts/houdini/api/creator_node_shelves.py
index 7c6122cffe..1f9fef7417 100644
--- a/openpype/hosts/houdini/api/creator_node_shelves.py
+++ b/openpype/hosts/houdini/api/creator_node_shelves.py
@@ -57,28 +57,31 @@ def create_interactive(creator_identifier, **kwargs):
list: The created instances.
"""
-
- # TODO Use Qt instead
- result, variant = hou.ui.readInput('Define variant name',
- buttons=("Ok", "Cancel"),
- initial_contents='Main',
- title="Define variant",
- help="Set the variant for the "
- "publish instance",
- close_choice=1)
- if result == 1:
- # User interrupted
- return
- variant = variant.strip()
- if not variant:
- raise RuntimeError("Empty variant value entered.")
-
host = registered_host()
context = CreateContext(host)
creator = context.manual_creators.get(creator_identifier)
if not creator:
- raise RuntimeError("Invalid creator identifier: "
- "{}".format(creator_identifier))
+ raise RuntimeError("Invalid creator identifier: {}".format(
+ creator_identifier)
+ )
+
+ # TODO Use Qt instead
+ result, variant = hou.ui.readInput(
+ "Define variant name",
+ buttons=("Ok", "Cancel"),
+ initial_contents=creator.get_default_variant(),
+ title="Define variant",
+ help="Set the variant for the publish instance",
+ close_choice=1
+ )
+
+ if result == 1:
+ # User interrupted
+ return
+
+ variant = variant.strip()
+ if not variant:
+ raise RuntimeError("Empty variant value entered.")
# TODO: Once more elaborate unique create behavior should exist per Creator
# instead of per network editor area then we should move this from here
diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py
index a32e9d8d61..3db18ca69a 100644
--- a/openpype/hosts/houdini/api/lib.py
+++ b/openpype/hosts/houdini/api/lib.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import sys
import os
+import errno
import re
import uuid
import logging
@@ -9,10 +10,15 @@ import json
import six
+from openpype.lib import StringTemplate
from openpype.client import get_asset_by_name
-from openpype.pipeline import legacy_io
-from openpype.pipeline.context_tools import get_current_project_asset
-
+from openpype.settings import get_current_project_settings
+from openpype.pipeline import get_current_project_name, get_current_asset_name
+from openpype.pipeline.context_tools import (
+ get_current_context_template_data,
+ get_current_project_asset
+)
+from openpype.widgets import popup
import hou
@@ -22,9 +28,12 @@ log = logging.getLogger(__name__)
JSON_PREFIX = "JSON:::"
-def get_asset_fps():
+def get_asset_fps(asset_doc=None):
"""Return current asset fps."""
- return get_current_project_asset()["data"].get("fps")
+
+ if asset_doc is None:
+ asset_doc = get_current_project_asset(fields=["data.fps"])
+ return asset_doc["data"]["fps"]
def set_id(node, unique_id, overwrite=False):
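`get_asset_fps` now accepts an optional pre-fetched asset document, so callers that already hold one avoid a second database query. A short usage sketch:

    fps = get_asset_fps(asset_doc)  # caller already queried the asset
    fps = get_asset_fps()           # helper queries the current asset itself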
@@ -78,8 +87,8 @@ def generate_ids(nodes, asset_id=None):
"""
if asset_id is None:
- project_name = legacy_io.active_project()
- asset_name = legacy_io.Session["AVALON_ASSET"]
+ project_name = get_current_project_name()
+ asset_name = get_current_asset_name()
# Get the asset ID from the database for the asset of current context
asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"])
@@ -157,8 +166,6 @@ def validate_fps():
if current_fps != fps:
- from openpype.widgets import popup
-
# Find main window
parent = hou.ui.mainQtWindow()
if parent is None:
@@ -472,14 +479,19 @@ def maintained_selection():
def reset_framerange():
- """Set frame range to current asset"""
+ """Set frame range and FPS to current asset"""
- project_name = legacy_io.active_project()
- asset_name = legacy_io.Session["AVALON_ASSET"]
+ # Get asset data
+ project_name = get_current_project_name()
+ asset_name = get_current_asset_name()
# Get the asset ID from the database for the asset of current context
asset_doc = get_asset_by_name(project_name, asset_name)
asset_data = asset_doc["data"]
+ # Get FPS
+ fps = get_asset_fps(asset_doc)
+
+ # Get Start and End Frames
frame_start = asset_data.get("frameStart")
frame_end = asset_data.get("frameEnd")
@@ -493,6 +505,9 @@ def reset_framerange():
frame_start -= int(handle_start)
frame_end += int(handle_end)
+ # Set frame range and FPS
+ print("Setting scene FPS to {}".format(int(fps)))
+ set_scene_fps(fps)
hou.playbar.setFrameRange(frame_start, frame_end)
hou.playbar.setPlaybackRange(frame_start, frame_end)
hou.setFrame(frame_start)
@@ -638,3 +653,197 @@ def get_color_management_preferences():
"display": hou.Color.ocio_defaultDisplay(),
"view": hou.Color.ocio_defaultView()
}
+
+
+def get_obj_node_output(obj_node):
+ """Find output node.
+
+ If the node has any output node return the
+ output node with the minimum `outputidx`.
+ When no output is present, return the node
+ with the display flag set. If no output node is
+ detected, None is returned.
+
+ Arguments:
+ obj_node (hou.Node): The node to retrieve the
+ output node for.
+
+ Returns:
+ Optional[hou.Node]: The child output node.
+
+ """
+
+ outputs = obj_node.subnetOutputs()
+ if not outputs:
+ return
+
+ elif len(outputs) == 1:
+ return outputs[0]
+
+ else:
+ return min(outputs,
+ key=lambda node: node.evalParm('outputidx'))
+
+
+def get_output_children(output_node, include_sops=True):
+ """Recursively return a list of all output nodes
+ contained in this node including this node.
+
+ It works in a similar manner to output_node.allNodes().
+ """
+ out_list = [output_node]
+
+ if output_node.childTypeCategory() == hou.objNodeTypeCategory():
+ for child in output_node.children():
+ out_list += get_output_children(child, include_sops=include_sops)
+
+ elif include_sops and \
+ output_node.childTypeCategory() == hou.sopNodeTypeCategory():
+ out = get_obj_node_output(output_node)
+ if out:
+ out_list += [out]
+
+ return out_list
+
+
+def get_resolution_from_doc(doc):
+ """Get resolution from the given asset document. """
+
+ if not doc or "data" not in doc:
+ print("Entered document is not valid. \"{}\"".format(str(doc)))
+ return None
+
+ resolution_width = doc["data"].get("resolutionWidth")
+ resolution_height = doc["data"].get("resolutionHeight")
+
+ # Make sure both width and height are set
+ if resolution_width is None or resolution_height is None:
+ print("No resolution information found for \"{}\"".format(doc["name"]))
+ return None
+
+ return int(resolution_width), int(resolution_height)
+
+
+def set_camera_resolution(camera, asset_doc=None):
+ """Apply resolution to camera from asset document of the publish"""
+
+ if not asset_doc:
+ asset_doc = get_current_project_asset()
+
+ resolution = get_resolution_from_doc(asset_doc)
+
+ if resolution:
+ print("Setting camera resolution: {} -> {}x{}".format(
+ camera.name(), resolution[0], resolution[1]
+ ))
+ camera.parm("resx").set(resolution[0])
+ camera.parm("resy").set(resolution[1])
+
+
+def get_camera_from_container(container):
+ """Get camera from container node. """
+
+ cameras = container.recursiveGlob(
+ "*",
+ filter=hou.nodeTypeFilter.ObjCamera,
+ include_subnets=False
+ )
+
+ assert len(cameras) == 1, "Camera instance must have only one camera"
+ return cameras[0]
+
+
+def get_context_var_changes():
+ """get context var changes."""
+
+ houdini_vars_to_update = {}
+
+ project_settings = get_current_project_settings()
+ houdini_vars_settings = \
+ project_settings["houdini"]["general"]["update_houdini_var_context"]
+
+ if not houdini_vars_settings["enabled"]:
+ return houdini_vars_to_update
+
+ houdini_vars = houdini_vars_settings["houdini_vars"]
+
+ # No vars specified - nothing to do
+ if not houdini_vars:
+ return houdini_vars_to_update
+
+ # Get Template data
+ template_data = get_current_context_template_data()
+
+ # Set Houdini Vars
+ for item in houdini_vars:
+ # For consistency reasons we always force all vars to be uppercase
+ # Also remove any leading and trailing whitespace.
+ var = item["var"].strip().upper()
+
+ # get and resolve template in value
+ item_value = StringTemplate.format_template(
+ item["value"],
+ template_data
+ )
+
+ if var == "JOB" and item_value == "":
+ # sync $JOB to $HIP if $JOB is empty
+ item_value = os.environ["HIP"]
+
+ if item["is_directory"]:
+ item_value = item_value.replace("\\", "/")
+
+ current_value = hou.hscript("echo -n `${}`".format(var))[0]
+
+ if current_value != item_value:
+ houdini_vars_to_update[var] = (
+ current_value, item_value, item["is_directory"]
+ )
+
+ return houdini_vars_to_update
+
+
+def update_houdini_vars_context():
+ """Update asset context variables"""
+
+ for var, (_old, new, is_directory) in get_context_var_changes().items():
+ if is_directory:
+ try:
+ os.makedirs(new)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ print(
+ "Failed to create ${} dir. Maybe due to "
+ "insufficient permissions.".format(var)
+ )
+
+ hou.hscript("set {}={}".format(var, new))
+ os.environ[var] = new
+ print("Updated ${} to {}".format(var, new))
+
+
+def update_houdini_vars_context_dialog():
+ """Show pop-up to update asset context variables"""
+ update_vars = get_context_var_changes()
+ if not update_vars:
+ # Nothing to change
+ print("Nothing to change, Houdini vars are already up to date.")
+ return
+
+ message = "\n".join(
+ "${}: {} -> {}".format(var, old or "None", new or "None")
+ for var, (old, new, _is_directory) in update_vars.items()
+ )
+
+ # TODO: Use better UI!
+ parent = hou.ui.mainQtWindow()
+ dialog = popup.Popup(parent=parent)
+ dialog.setModal(True)
+ dialog.setWindowTitle("Houdini scene has outdated asset variables")
+ dialog.setMessage(message)
+ dialog.setButtonText("Fix")
+
+ # on_clicked is emitted when the Fix button is clicked
+ dialog.on_clicked.connect(update_houdini_vars_context)
+
+ dialog.show()
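`get_context_var_changes` maps each var name to an `(old, new, is_directory)` tuple, and `update_houdini_vars_context` applies every change both through hscript and the process environment. A sketch of inspecting the mapping before applying it (values illustrative):

    for var, (old, new, is_directory) in get_context_var_changes().items():
        print("${}: {!r} -> {!r} (directory: {})".format(
            var, old, new, is_directory))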
diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py
index 8a26bbb504..f8db45c56b 100644
--- a/openpype/hosts/houdini/api/pipeline.py
+++ b/openpype/hosts/houdini/api/pipeline.py
@@ -14,6 +14,7 @@ import pyblish.api
from openpype.pipeline import (
register_creator_plugin_path,
register_loader_plugin_path,
+ register_inventory_action_path,
AVALON_CONTAINER_ID,
)
from openpype.pipeline.load import any_outdated_containers
@@ -25,7 +26,6 @@ from openpype.lib import (
emit_event,
)
-from .lib import get_asset_fps
log = logging.getLogger("openpype.hosts.houdini")
@@ -56,6 +56,7 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
pyblish.api.register_plugin_path(PUBLISH_PATH)
register_loader_plugin_path(LOAD_PATH)
register_creator_plugin_path(CREATE_PATH)
+ register_inventory_action_path(INVENTORY_PATH)
log.info("Installing callbacks ... ")
# register_event_callback("init", on_init)
@@ -299,11 +300,36 @@ def on_save():
log.info("Running callback on save..")
+ # update houdini vars
+ lib.update_houdini_vars_context_dialog()
+
nodes = lib.get_id_required_nodes()
for node, new_id in lib.generate_ids(nodes):
lib.set_id(node, new_id, overwrite=False)
+def _show_outdated_content_popup():
+ # Get main window
+ parent = lib.get_main_window()
+ if parent is None:
+ log.info("Skipping outdated content pop-up "
+ "because Houdini window can't be found.")
+ else:
+ from openpype.widgets import popup
+
+ # Show outdated pop-up
+ def _on_show_inventory():
+ from openpype.tools.utils import host_tools
+ host_tools.show_scene_inventory(parent=parent)
+
+ dialog = popup.Popup(parent=parent)
+ dialog.setWindowTitle("Houdini scene has outdated content")
+ dialog.setMessage("There are outdated containers in "
+ "your Houdini scene.")
+ dialog.on_clicked.connect(_on_show_inventory)
+ dialog.show()
+
+
def on_open():
if not hou.isUIAvailable():
@@ -312,33 +338,26 @@ def on_open():
log.info("Running callback on open..")
+ # update houdini vars
+ lib.update_houdini_vars_context_dialog()
+
# Validate FPS after update_task_from_path to
# ensure it is using correct FPS for the asset
lib.validate_fps()
if any_outdated_containers():
- from openpype.widgets import popup
-
- log.warning("Scene has outdated content.")
-
- # Get main window
parent = lib.get_main_window()
if parent is None:
- log.info("Skipping outdated content pop-up "
- "because Houdini window can't be found.")
+ # When opening Houdini with the last workfile on launch, the UI
+ # hasn't fully initialized yet when the `on_open` callback triggers.
+ # We defer the dialog popup to wait for the UI to become available.
+ # We assume it will open because `hou.isUIAvailable()` returns True
+ import hdefereval
+ hdefereval.executeDeferred(_show_outdated_content_popup)
else:
+ _show_outdated_content_popup()
- # Show outdated pop-up
- def _on_show_inventory():
- from openpype.tools.utils import host_tools
- host_tools.show_scene_inventory(parent=parent)
-
- dialog = popup.Popup(parent=parent)
- dialog.setWindowTitle("Houdini scene has outdated content")
- dialog.setMessage("There are outdated containers in "
- "your Houdini scene.")
- dialog.on_clicked.connect(_on_show_inventory)
- dialog.show()
+ log.warning("Scene has outdated content.")
def on_new():
@@ -385,12 +404,8 @@ def _set_context_settings():
None
"""
- # Set new scene fps
- fps = get_asset_fps()
- print("Setting scene FPS to %i" % fps)
- lib.set_scene_fps(fps)
-
lib.reset_framerange()
+ lib.update_houdini_vars_context()
def on_pyblish_instance_toggled(instance, new_value, old_value):
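The deferred pop-up leans on Houdini's `hdefereval` module, which queues a callable until the main event loop is idle; that is what makes the dialog safe to build even though `on_open` fires before the UI has fully initialized. A minimal sketch of the pattern:

    import hdefereval

    def _show_dialog():
        # runs once the Houdini UI event loop is available
        ...

    hdefereval.executeDeferred(_show_dialog)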
diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py
index 1e7eaa7e22..a0a7dcc2e4 100644
--- a/openpype/hosts/houdini/api/plugin.py
+++ b/openpype/hosts/houdini/api/plugin.py
@@ -167,9 +167,12 @@ class HoudiniCreatorBase(object):
class HoudiniCreator(NewCreator, HoudiniCreatorBase):
"""Base class for most of the Houdini creator plugins."""
selected_nodes = []
+ settings_name = None
def create(self, subset_name, instance_data, pre_create_data):
try:
+ self.selected_nodes = []
+
if pre_create_data.get("use_selection"):
self.selected_nodes = hou.selectedNodes()
@@ -184,13 +187,14 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
self.customize_node_look(instance_node)
instance_data["instance_node"] = instance_node.path()
+ instance_data["instance_id"] = instance_node.path()
instance = CreatedInstance(
self.family,
subset_name,
instance_data,
self)
self._add_instance_to_context(instance)
- imprint(instance_node, instance.data_to_store())
+ self.imprint(instance_node, instance.data_to_store())
return instance
except hou.Error as er:
@@ -219,25 +223,41 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
self.cache_subsets(self.collection_shared_data)
for instance in self.collection_shared_data[
"houdini_cached_subsets"].get(self.identifier, []):
+
+ node_data = read(instance)
+
+ # Node paths are always the full node path since that is unique
+ # Because it's the node's path it's not written into attributes
+ # but explicitly collected
+ node_path = instance.path()
+ node_data["instance_id"] = node_path
+ node_data["instance_node"] = node_path
+
created_instance = CreatedInstance.from_existing(
- read(instance), self
+ node_data, self
)
self._add_instance_to_context(created_instance)
def update_instances(self, update_list):
for created_inst, changes in update_list:
instance_node = hou.node(created_inst.get("instance_node"))
-
new_values = {
key: changes[key].new_value
for key in changes.changed_keys
}
- imprint(
+ self.imprint(
instance_node,
new_values,
update=True
)
+ def imprint(self, node, values, update=False):
+ # Never store instance node and instance id since that data comes
+ # from the node's path
+ values.pop("instance_node", None)
+ values.pop("instance_id", None)
+ imprint(node, values, update=update)
+
def remove_instances(self, instances):
"""Remove specified instance from the scene.
@@ -292,3 +312,21 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
"""
return [hou.ropNodeTypeCategory()]
+
+ def apply_settings(self, project_settings):
+ """Method called on initialization of plugin to apply settings."""
+
+ settings_name = self.settings_name
+ if settings_name is None:
+ settings_name = self.__class__.__name__
+
+ settings = project_settings["houdini"]["create"]
+ settings = settings.get(settings_name)
+ if settings is None:
+ self.log.debug(
+ "No settings found for {}".format(self.__class__.__name__)
+ )
+ return
+
+ for key, value in settings.items():
+ setattr(self, key, value)
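`apply_settings` looks up a block under `project_settings["houdini"]["create"]` keyed by `settings_name` (falling back to the class name) and copies every key onto the plugin instance. Under that scheme, a settings fragment like the following (keys hypothetical) would become class attributes on load:

    {
        "houdini": {
            "create": {
                "CreateArnoldRop": {
                    "enabled": True,
                    "ext": "exr"
                }
            }
        }
    }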
diff --git a/openpype/hosts/houdini/api/shelves.py b/openpype/hosts/houdini/api/shelves.py
index 6e0f367f62..21e44e494a 100644
--- a/openpype/hosts/houdini/api/shelves.py
+++ b/openpype/hosts/houdini/api/shelves.py
@@ -4,6 +4,7 @@ import logging
import platform
from openpype.settings import get_project_settings
+from openpype.pipeline import get_current_project_name
import hou
@@ -17,7 +18,8 @@ def generate_shelves():
current_os = platform.system().lower()
# load configuration of houdini shelves
- project_settings = get_project_settings(os.getenv("AVALON_PROJECT"))
+ project_name = get_current_project_name()
+ project_settings = get_project_settings(project_name)
shelves_set_config = project_settings["houdini"]["shelves"]
if not shelves_set_config:
diff --git a/openpype/hosts/houdini/hooks/set_paths.py b/openpype/hosts/houdini/hooks/set_paths.py
index 04a33b1643..b23659e23b 100644
--- a/openpype/hosts/houdini/hooks/set_paths.py
+++ b/openpype/hosts/houdini/hooks/set_paths.py
@@ -1,4 +1,4 @@
-from openpype.lib import PreLaunchHook
+from openpype.lib.applications import PreLaunchHook, LaunchTypes
class SetPath(PreLaunchHook):
@@ -6,7 +6,8 @@ class SetPath(PreLaunchHook):
Hook `GlobalHostDataHook` must be executed before this hook.
"""
- app_groups = ["houdini"]
+ app_groups = {"houdini"}
+ launch_types = {LaunchTypes.local}
def execute(self):
workdir = self.launch_context.env.get("AVALON_WORKDIR", "")
diff --git a/openpype/hosts/houdini/plugins/create/convert_legacy.py b/openpype/hosts/houdini/plugins/create/convert_legacy.py
index e549c9dc26..86103e3369 100644
--- a/openpype/hosts/houdini/plugins/create/convert_legacy.py
+++ b/openpype/hosts/houdini/plugins/create/convert_legacy.py
@@ -69,6 +69,8 @@ class HoudiniLegacyConvertor(SubsetConvertorPlugin):
"creator_identifier": self.family_to_id[family],
"instance_node": subset.path()
}
+ if family == "pointcache":
+ data["families"] = ["abc"]
self.log.info("Converting {} to {}".format(
subset.path(), self.family_to_id[family]))
imprint(subset, data)
diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py
index 8b310753d0..12d08f7d83 100644
--- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py
+++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py
@@ -10,9 +10,10 @@ class CreateArnoldAss(plugin.HoudiniCreator):
label = "Arnold ASS"
family = "ass"
icon = "magic"
- defaults = ["Main"]
# Default extension: `.ass` or `.ass.gz`
+ # however calling HoudiniCreator.create()
+ # will override it by the value in the project settings
ext = ".ass"
def create(self, subset_name, instance_data, pre_create_data):
diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_rop.py b/openpype/hosts/houdini/plugins/create/create_arnold_rop.py
index bddf26dbd5..b58c377a20 100644
--- a/openpype/hosts/houdini/plugins/create/create_arnold_rop.py
+++ b/openpype/hosts/houdini/plugins/create/create_arnold_rop.py
@@ -1,5 +1,5 @@
from openpype.hosts.houdini.api import plugin
-from openpype.lib import EnumDef
+from openpype.lib import EnumDef, BoolDef
class CreateArnoldRop(plugin.HoudiniCreator):
@@ -9,7 +9,6 @@ class CreateArnoldRop(plugin.HoudiniCreator):
label = "Arnold ROP"
family = "arnold_rop"
icon = "magic"
- defaults = ["master"]
# Default extension
ext = "exr"
@@ -24,7 +23,7 @@ class CreateArnoldRop(plugin.HoudiniCreator):
# Add chunk size attribute
instance_data["chunkSize"] = 1
# Submit for job publishing
- instance_data["farm"] = True
+ instance_data["farm"] = pre_create_data.get("farm")
instance = super(CreateArnoldRop, self).create(
subset_name,
@@ -64,6 +63,9 @@ class CreateArnoldRop(plugin.HoudiniCreator):
]
return attrs + [
+ BoolDef("farm",
+ label="Submitting to Farm",
+ default=True),
EnumDef("image_format",
image_format_enum,
default=self.ext,
diff --git a/openpype/hosts/houdini/plugins/create/create_bgeo.py b/openpype/hosts/houdini/plugins/create/create_bgeo.py
new file mode 100644
index 0000000000..a3f31e7e94
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/create/create_bgeo.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+"""Creator plugin for creating pointcache bgeo files."""
+from openpype.hosts.houdini.api import plugin
+from openpype.pipeline import CreatedInstance, CreatorError
+from openpype.lib import EnumDef
+
+
+class CreateBGEO(plugin.HoudiniCreator):
+ """BGEO pointcache creator."""
+ identifier = "io.openpype.creators.houdini.bgeo"
+ label = "PointCache (Bgeo)"
+ family = "pointcache"
+ icon = "gears"
+
+ def create(self, subset_name, instance_data, pre_create_data):
+ import hou
+
+ instance_data.pop("active", None)
+
+ instance_data.update({"node_type": "geometry"})
+
+ instance = super(CreateBGEO, self).create(
+ subset_name,
+ instance_data,
+ pre_create_data) # type: CreatedInstance
+
+ instance_node = hou.node(instance.get("instance_node"))
+
+ file_path = "{}{}".format(
+ hou.text.expandString("$HIP/pyblish/"),
+ "{}.$F4.{}".format(
+ subset_name,
+ pre_create_data.get("bgeo_type") or "bgeo.sc")
+ )
+ parms = {
+ "sopoutput": file_path
+ }
+
+ instance_node.parm("trange").set(1)
+ if self.selected_nodes:
+ # if selection is on SOP level, use it
+ if isinstance(self.selected_nodes[0], hou.SopNode):
+ parms["soppath"] = self.selected_nodes[0].path()
+ else:
+ # try to find output node with the lowest index
+ outputs = [
+ child for child in self.selected_nodes[0].children()
+ if child.type().name() == "output"
+ ]
+ if not outputs:
+ instance_node.setParms(parms)
+ raise CreatorError((
+ "Missing output node in SOP level for the selection. "
+ "Please select correct SOP path in created instance."
+ ))
+ outputs.sort(key=lambda output: output.evalParm("outputidx"))
+ parms["soppath"] = outputs[0].path()
+
+ instance_node.setParms(parms)
+
+ def get_pre_create_attr_defs(self):
+ attrs = super().get_pre_create_attr_defs()
+ bgeo_enum = [
+ {
+ "value": "bgeo",
+ "label": "uncompressed bgeo (.bgeo)"
+ },
+ {
+ "value": "bgeosc",
+ "label": "BLOSC compressed bgeo (.bgeosc)"
+ },
+ {
+ "value": "bgeo.sc",
+ "label": "BLOSC compressed bgeo (.bgeo.sc)"
+ },
+ {
+ "value": "bgeo.gz",
+ "label": "GZ compressed bgeo (.bgeo.gz)"
+ },
+ {
+ "value": "bgeo.lzma",
+ "label": "LZMA compressed bgeo (.bgeo.lzma)"
+ },
+ {
+ "value": "bgeo.bz2",
+ "label": "BZip2 compressed bgeo (.bgeo.bz2)"
+ }
+ ]
+
+ return attrs + [
+ EnumDef("bgeo_type", bgeo_enum, label="BGEO Options"),
+ ]
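The output path above stitches the chosen container format straight into the file name. A worked example for the default format and an illustrative subset name:

    subset_name = "pointcacheMain"   # illustrative
    bgeo_type = "bgeo.sc"            # the default when nothing is chosen
    file_path = "{}{}".format(
        "$HIP/pyblish/",             # expandString() result assumed verbatim
        "{}.$F4.{}".format(subset_name, bgeo_type),
    )
    # -> "$HIP/pyblish/pointcacheMain.$F4.bgeo.sc"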
diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py
index 5f95b2efb4..c4093bfbc6 100644
--- a/openpype/hosts/houdini/plugins/create/create_hda.py
+++ b/openpype/hosts/houdini/plugins/create/create_hda.py
@@ -4,7 +4,6 @@ from openpype.client import (
get_asset_by_name,
get_subsets,
)
-from openpype.pipeline import legacy_io
from openpype.hosts.houdini.api import plugin
@@ -21,7 +20,7 @@ class CreateHDA(plugin.HoudiniCreator):
# type: (str) -> bool
"""Check if existing subset name versions already exists."""
# Get all subsets of the current asset
- project_name = legacy_io.active_project()
+ project_name = self.project_name
asset_doc = get_asset_by_name(
project_name, self.data["asset"], fields=["_id"]
)
diff --git a/openpype/hosts/houdini/plugins/create/create_karma_rop.py b/openpype/hosts/houdini/plugins/create/create_karma_rop.py
index edfb992e1a..4e1360ca45 100644
--- a/openpype/hosts/houdini/plugins/create/create_karma_rop.py
+++ b/openpype/hosts/houdini/plugins/create/create_karma_rop.py
@@ -11,7 +11,6 @@ class CreateKarmaROP(plugin.HoudiniCreator):
label = "Karma ROP"
family = "karma_rop"
icon = "magic"
- defaults = ["master"]
def create(self, subset_name, instance_data, pre_create_data):
import hou # noqa
@@ -21,7 +20,7 @@ class CreateKarmaROP(plugin.HoudiniCreator):
# Add chunk size attribute
instance_data["chunkSize"] = 10
# Submit for job publishing
- instance_data["farm"] = True
+ instance_data["farm"] = pre_create_data.get("farm")
instance = super(CreateKarmaROP, self).create(
subset_name,
@@ -67,6 +66,7 @@ class CreateKarmaROP(plugin.HoudiniCreator):
camera = None
for node in self.selected_nodes:
if node.type().name() == "cam":
+ camera = node.path()
has_camera = pre_create_data.get("cam_res")
if has_camera:
res_x = node.evalParm("resx")
@@ -96,6 +96,9 @@ class CreateKarmaROP(plugin.HoudiniCreator):
]
return attrs + [
+ BoolDef("farm",
+ label="Submitting to Farm",
+ default=True),
EnumDef("image_format",
image_format_enum,
default="exr",
diff --git a/openpype/hosts/houdini/plugins/create/create_mantra_rop.py b/openpype/hosts/houdini/plugins/create/create_mantra_rop.py
index 5ca53e96de..d2f0e735a8 100644
--- a/openpype/hosts/houdini/plugins/create/create_mantra_rop.py
+++ b/openpype/hosts/houdini/plugins/create/create_mantra_rop.py
@@ -11,7 +11,6 @@ class CreateMantraROP(plugin.HoudiniCreator):
label = "Mantra ROP"
family = "mantra_rop"
icon = "magic"
- defaults = ["master"]
def create(self, subset_name, instance_data, pre_create_data):
import hou # noqa
@@ -21,7 +20,7 @@ class CreateMantraROP(plugin.HoudiniCreator):
# Add chunk size attribute
instance_data["chunkSize"] = 10
# Submit for job publishing
- instance_data["farm"] = True
+ instance_data["farm"] = pre_create_data.get("farm")
instance = super(CreateMantraROP, self).create(
subset_name,
@@ -76,6 +75,9 @@ class CreateMantraROP(plugin.HoudiniCreator):
]
return attrs + [
+ BoolDef("farm",
+ label="Submitting to Farm",
+ default=True),
EnumDef("image_format",
image_format_enum,
default="exr",
diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py
index df74070fee..7eaf2aff2b 100644
--- a/openpype/hosts/houdini/plugins/create/create_pointcache.py
+++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py
@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating pointcache alembics."""
from openpype.hosts.houdini.api import plugin
-from openpype.pipeline import CreatedInstance
import hou
@@ -9,20 +8,18 @@ import hou
class CreatePointCache(plugin.HoudiniCreator):
"""Alembic ROP to pointcache"""
identifier = "io.openpype.creators.houdini.pointcache"
- label = "Point Cache"
+ label = "PointCache (Abc)"
family = "pointcache"
icon = "gears"
def create(self, subset_name, instance_data, pre_create_data):
- import hou
-
instance_data.pop("active", None)
instance_data.update({"node_type": "alembic"})
instance = super(CreatePointCache, self).create(
subset_name,
instance_data,
- pre_create_data) # type: CreatedInstance
+ pre_create_data)
instance_node = hou.node(instance.get("instance_node"))
parms = {
@@ -37,13 +34,44 @@ class CreatePointCache(plugin.HoudiniCreator):
}
if self.selected_nodes:
- parms["sop_path"] = self.selected_nodes[0].path()
+ selected_node = self.selected_nodes[0]
- # try to find output node
- for child in self.selected_nodes[0].children():
- if child.type().name() == "output":
- parms["sop_path"] = child.path()
- break
+ # Although Houdini allows an ObjNode path in `sop_path` on the
+ # ROP node, we prefer to set it to the SopNode path explicitly.
+
+ # Allow sop level paths (e.g. /obj/geo1/box1)
+ if isinstance(selected_node, hou.SopNode):
+ parms["sop_path"] = selected_node.path()
+ self.log.debug(
+ "Valid SopNode selection, 'SOP Path' in ROP will be set to '%s'."
+ % selected_node.path()
+ )
+
+ # Allow object level paths to Geometry nodes (e.g. /obj/geo1)
+ # but do not allow other object level node types like cameras, etc.
+ elif isinstance(selected_node, hou.ObjNode) and \
+ selected_node.type().name() in ["geo"]:
+
+ # get the output node with the minimum
+ # 'outputidx' or the node with display flag
+ sop_path = self.get_obj_output(selected_node)
+
+ if sop_path:
+ parms["sop_path"] = sop_path.path()
+ self.log.debug(
+ "Valid ObjNode selection, 'SOP Path' in ROP will be set to "
+ "the child path '%s'."
+ % sop_path.path()
+ )
+
+ if not parms.get("sop_path", None):
+ self.log.debug(
+ "Selection isn't valid. 'SOP Path' in ROP will be empty."
+ )
+ else:
+ self.log.debug(
+ "No Selection. 'SOP Path' in ROP will be empty."
+ )
instance_node.setParms(parms)
instance_node.parm("trange").set(1)
@@ -57,3 +85,23 @@ class CreatePointCache(plugin.HoudiniCreator):
hou.ropNodeTypeCategory(),
hou.sopNodeTypeCategory()
]
+
+ def get_obj_output(self, obj_node):
+ """Find output node with the smallest 'outputidx'."""
+
+ outputs = obj_node.subnetOutputs()
+
+ # if obj_node is empty
+ if not outputs:
+ return
+
+ # if obj_node has a single output child, whether it's a
+ # sop output node or a node with the render flag
+ elif len(outputs) == 1:
+ return outputs[0]
+
+ # if there is more than one, the node has multiple output nodes;
+ # return the one with the minimum 'outputidx'
+ else:
+ return min(outputs,
+ key=lambda node: node.evalParm('outputidx'))
diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py
index 8b6a68437b..b814dd9d57 100644
--- a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py
+++ b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py
@@ -33,7 +33,7 @@ class CreateRedshiftProxy(plugin.HoudiniCreator):
instance_node = hou.node(instance.get("instance_node"))
parms = {
- "RS_archive_file": '$HIP/pyblish/`{}.$F4.rs'.format(subset_name),
+ "RS_archive_file": '$HIP/pyblish/{}.$F4.rs'.format(subset_name),
}
if self.selected_nodes:
diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py
index 4576e9a721..1b8826a932 100644
--- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py
+++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py
@@ -3,7 +3,7 @@
import hou # noqa
from openpype.hosts.houdini.api import plugin
-from openpype.lib import EnumDef
+from openpype.lib import EnumDef, BoolDef
class CreateRedshiftROP(plugin.HoudiniCreator):
@@ -13,7 +13,6 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
label = "Redshift ROP"
family = "redshift_rop"
icon = "magic"
- defaults = ["master"]
ext = "exr"
def create(self, subset_name, instance_data, pre_create_data):
@@ -23,7 +22,7 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
# Add chunk size attribute
instance_data["chunkSize"] = 10
# Submit for job publishing
- instance_data["farm"] = True
+ instance_data["farm"] = pre_create_data.get("farm")
instance = super(CreateRedshiftROP, self).create(
subset_name,
@@ -100,6 +99,9 @@ class CreateRedshiftROP(plugin.HoudiniCreator):
]
return attrs + [
+ BoolDef("farm",
+ label="Submitting to Farm",
+ default=True),
EnumDef("image_format",
image_format_enum,
default=self.ext,
diff --git a/openpype/hosts/houdini/plugins/create/create_review.py b/openpype/hosts/houdini/plugins/create/create_review.py
index ab06b30c35..60c34a358b 100644
--- a/openpype/hosts/houdini/plugins/create/create_review.py
+++ b/openpype/hosts/houdini/plugins/create/create_review.py
@@ -3,6 +3,9 @@
from openpype.hosts.houdini.api import plugin
from openpype.lib import EnumDef, BoolDef, NumberDef
+import os
+import hou
+
class CreateReview(plugin.HoudiniCreator):
"""Review with OpenGL ROP"""
@@ -13,7 +16,6 @@ class CreateReview(plugin.HoudiniCreator):
icon = "video-camera"
def create(self, subset_name, instance_data, pre_create_data):
- import hou
instance_data.pop("active", None)
instance_data.update({"node_type": "opengl"})
@@ -82,6 +84,11 @@ class CreateReview(plugin.HoudiniCreator):
instance_node.setParms(parms)
+ # Set OCIO Colorspace to the default output colorspace
+ # if there's OCIO
+ if os.getenv("OCIO"):
+ self.set_colorcorrect_to_default_view_space(instance_node)
+
to_lock = ["id", "family"]
self.lock_parameters(instance_node, to_lock)
@@ -123,3 +130,23 @@ class CreateReview(plugin.HoudiniCreator):
minimum=0.0001,
decimals=3)
]
+
+ def set_colorcorrect_to_default_view_space(self,
+ instance_node):
+ """Set ociocolorspace to the default output space."""
+ from openpype.hosts.houdini.api.colorspace import get_default_display_view_colorspace # noqa
+
+ # set Color Correction parameter to OpenColorIO
+ instance_node.setParms({"colorcorrect": 2})
+
+ # Get default view space for ociocolorspace parm.
+ default_view_space = get_default_display_view_colorspace()
+ instance_node.setParms(
+ {"ociocolorspace": default_view_space}
+ )
+
+ self.log.debug(
+ "'OCIO Colorspace' parm on '{}' has been set to "
+ "the default view color space '{}'"
+ .format(instance_node, default_view_space)
+ )
diff --git a/openpype/hosts/houdini/plugins/create/create_staticmesh.py b/openpype/hosts/houdini/plugins/create/create_staticmesh.py
new file mode 100644
index 0000000000..ea0b36f03f
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/create/create_staticmesh.py
@@ -0,0 +1,143 @@
+# -*- coding: utf-8 -*-
+"""Creator for Unreal Static Meshes."""
+from openpype.hosts.houdini.api import plugin
+from openpype.lib import BoolDef, EnumDef
+
+import hou
+
+
+class CreateStaticMesh(plugin.HoudiniCreator):
+ """Static Meshes as FBX. """
+
+ identifier = "io.openpype.creators.houdini.staticmesh.fbx"
+ label = "Static Mesh (FBX)"
+ family = "staticMesh"
+ icon = "fa5s.cubes"
+
+ default_variants = ["Main"]
+
+ def create(self, subset_name, instance_data, pre_create_data):
+
+ instance_data.update({"node_type": "filmboxfbx"})
+
+ instance = super(CreateStaticMesh, self).create(
+ subset_name,
+ instance_data,
+ pre_create_data)
+
+ # get the created rop node
+ instance_node = hou.node(instance.get("instance_node"))
+
+ # prepare parms
+ output_path = hou.text.expandString(
+ "$HIP/pyblish/{}.fbx".format(subset_name)
+ )
+
+ parms = {
+ "startnode": self.get_selection(),
+ "sopoutput": output_path,
+ # vertex cache format
+ "vcformat": pre_create_data.get("vcformat"),
+ "convertunits": pre_create_data.get("convertunits"),
+ # set render range to use frame range start-end frame
+ "trange": 1,
+ "createsubnetroot": pre_create_data.get("createsubnetroot")
+ }
+
+ # set parms
+ instance_node.setParms(parms)
+
+ # Lock any parameters in this list
+ to_lock = ["family", "id"]
+ self.lock_parameters(instance_node, to_lock)
+
+ def get_network_categories(self):
+ return [
+ hou.ropNodeTypeCategory(),
+ hou.sopNodeTypeCategory()
+ ]
+
+ def get_pre_create_attr_defs(self):
+ """Add settings for users. """
+
+ attrs = super(CreateStaticMesh, self).get_pre_create_attr_defs()
+ createsubnetroot = BoolDef("createsubnetroot",
+ tooltip="Create an extra root for the "
+ "Export node when it's a "
+ "subnetwork. This causes the "
+ "exporting subnetwork node to be "
+ "represented in the FBX file.",
+ default=False,
+ label="Create Root for Subnet")
+ vcformat = EnumDef("vcformat",
+ items={
+ 0: "Maya Compatible (MC)",
+ 1: "3DS MAX Compatible (PC2)"
+ },
+ default=0,
+ label="Vertex Cache Format")
+ convert_units = BoolDef("convertunits",
+ tooltip="When on, the FBX is converted"
+ "from the current Houdini "
+ "system units to the native "
+ "FBX unit of centimeters.",
+ default=False,
+ label="Convert Units")
+
+ return attrs + [createsubnetroot, vcformat, convert_units]
+
+ def get_dynamic_data(
+ self, variant, task_name, asset_doc, project_name, host_name, instance
+ ):
+ """
+ The default subset name templates for Unreal include {asset} and thus
+ we should pass that along as dynamic data.
+ """
+ dynamic_data = super(CreateStaticMesh, self).get_dynamic_data(
+ variant, task_name, asset_doc, project_name, host_name, instance
+ )
+ dynamic_data["asset"] = asset_doc["name"]
+ return dynamic_data
+
+ def get_selection(self):
+ """Selection Logic.
+
+ How self.selected_nodes should be processed to get
+ the desired node from the selection.
+
+ Returns:
+ str : node path
+ """
+
+ selection = ""
+
+ if self.selected_nodes:
+ selected_node = self.selected_nodes[0]
+
+ # Accept sop level nodes (e.g. /obj/geo1/box1)
+ if isinstance(selected_node, hou.SopNode):
+ selection = selected_node.path()
+ self.log.debug(
+ "Valid SopNode selection, 'Export' in filmboxfbx"
+ " will be set to '%s'.", selected_node
+ )
+
+ # Accept object level nodes (e.g. /obj/geo1)
+ elif isinstance(selected_node, hou.ObjNode):
+ selection = selected_node.path()
+ self.log.debug(
+ "Valid ObjNode selection, 'Export' in filmboxfbx "
+ "will be set to the child path '%s'.", selection
+ )
+
+ else:
+ self.log.debug(
+ "Selection isn't valid. 'Export' in "
+ "filmboxfbx will be empty."
+ )
+ else:
+ self.log.debug(
+ "No Selection. 'Export' in filmboxfbx will be empty."
+ )
+
+ return selection
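Because the default Unreal static-mesh subset name templates contain an `{asset}` token, `get_dynamic_data` injects the asset name into template resolution. With a template like `S_{asset}{variant}` (illustrative), asset `hero` and variant `Main` would resolve as:

    template = "S_{asset}{variant}"   # illustrative template
    print(template.format(asset="hero", variant="Main"))   # -> S_heroMain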
diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py
index c015cebd49..9c96e48e3a 100644
--- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py
+++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py
@@ -33,7 +33,7 @@ class CreateVDBCache(plugin.HoudiniCreator):
}
if self.selected_nodes:
- parms["soppath"] = self.selected_nodes[0].path()
+ parms["soppath"] = self.get_sop_node_path(self.selected_nodes[0])
instance_node.setParms(parms)
@@ -42,3 +42,63 @@ class CreateVDBCache(plugin.HoudiniCreator):
hou.ropNodeTypeCategory(),
hou.sopNodeTypeCategory()
]
+
+ def get_sop_node_path(self, selected_node):
+ """Get Sop Path of the selected node.
+
+ Although Houdini allows an ObjNode path in `sop_path` on the
+ ROP node, we prefer to set it to the SopNode path explicitly.
+ """
+
+ # Allow sop level paths (e.g. /obj/geo1/box1)
+ if isinstance(selected_node, hou.SopNode):
+ self.log.debug(
+ "Valid SopNode selection, 'SOP Path' in ROP will"
+ " be set to '%s'.", selected_node.path()
+ )
+ return selected_node.path()
+
+ # Allow object level paths to Geometry nodes (e.g. /obj/geo1)
+ # but do not allow other object level node types like cameras, etc.
+ elif isinstance(selected_node, hou.ObjNode) and \
+ selected_node.type().name() == "geo":
+
+ # Try to find output node.
+ sop_node = self.get_obj_output(selected_node)
+ if sop_node:
+ self.log.debug(
+ "Valid ObjNode selection, 'SOP Path' in ROP will "
+ "be set to the child path '%s'.", sop_node.path()
+ )
+ return sop_node.path()
+
+ self.log.debug(
+ "Selection isn't valid. 'SOP Path' in ROP will be empty."
+ )
+ return ""
+
+ def get_obj_output(self, obj_node):
+ """Try to find output node.
+
+ If any output nodes are present, return the output node with
+ the minimum 'outputidx'.
+ If no output nodes are present, return the node with the display flag.
+ If no nodes are present at all, return None.
+ """
+
+ outputs = obj_node.subnetOutputs()
+
+ # if obj_node is empty
+ if not outputs:
+ return
+
+ # if obj_node has a single output child, whether it's a
+ # sop output node or a node with the render flag
+ elif len(outputs) == 1:
+ return outputs[0]
+
+ # if there is more than one, the node has multiple output nodes;
+ # return the one with the minimum 'outputidx'
+ else:
+ return min(outputs,
+ key=lambda node: node.evalParm('outputidx'))
diff --git a/openpype/hosts/houdini/plugins/create/create_vray_rop.py b/openpype/hosts/houdini/plugins/create/create_vray_rop.py
index 1de9be4ed6..793a544fdf 100644
--- a/openpype/hosts/houdini/plugins/create/create_vray_rop.py
+++ b/openpype/hosts/houdini/plugins/create/create_vray_rop.py
@@ -14,8 +14,6 @@ class CreateVrayROP(plugin.HoudiniCreator):
label = "VRay ROP"
family = "vray_rop"
icon = "magic"
- defaults = ["master"]
-
ext = "exr"
def create(self, subset_name, instance_data, pre_create_data):
@@ -25,7 +23,7 @@ class CreateVrayROP(plugin.HoudiniCreator):
# Add chunk size attribute
instance_data["chunkSize"] = 10
# Submit for job publishing
- instance_data["farm"] = True
+ instance_data["farm"] = pre_create_data.get("farm")
instance = super(CreateVrayROP, self).create(
subset_name,
@@ -139,6 +137,9 @@ class CreateVrayROP(plugin.HoudiniCreator):
]
return attrs + [
+ BoolDef("farm",
+ label="Submitting to Farm",
+ default=True),
EnumDef("image_format",
image_format_enum,
default=self.ext,
diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py
index 1a8537adcd..cc45a6c2a8 100644
--- a/openpype/hosts/houdini/plugins/create/create_workfile.py
+++ b/openpype/hosts/houdini/plugins/create/create_workfile.py
@@ -4,7 +4,6 @@ from openpype.hosts.houdini.api import plugin
from openpype.hosts.houdini.api.lib import read, imprint
from openpype.hosts.houdini.api.pipeline import CONTEXT_CONTAINER
from openpype.pipeline import CreatedInstance, AutoCreator
-from openpype.pipeline import legacy_io
from openpype.client import get_asset_by_name
import hou
@@ -27,9 +26,9 @@ class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator):
), None)
project_name = self.project_name
- asset_name = legacy_io.Session["AVALON_ASSET"]
- task_name = legacy_io.Session["AVALON_TASK"]
- host_name = legacy_io.Session["AVALON_APP"]
+ asset_name = self.create_context.get_current_asset_name()
+ task_name = self.create_context.get_current_task_name()
+ host_name = self.host_name
if current_instance is None:
asset_doc = get_asset_by_name(project_name, asset_name)
diff --git a/openpype/hosts/houdini/plugins/inventory/set_camera_resolution.py b/openpype/hosts/houdini/plugins/inventory/set_camera_resolution.py
new file mode 100644
index 0000000000..18ececb019
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/inventory/set_camera_resolution.py
@@ -0,0 +1,26 @@
+from openpype.pipeline import InventoryAction
+from openpype.hosts.houdini.api.lib import (
+ get_camera_from_container,
+ set_camera_resolution
+)
+from openpype.pipeline.context_tools import get_current_project_asset
+
+
+class SetCameraResolution(InventoryAction):
+
+ label = "Set Camera Resolution"
+ icon = "desktop"
+ color = "orange"
+
+ @staticmethod
+ def is_compatible(container):
+ return (
+ container.get("loader") == "CameraLoader"
+ )
+
+ def process(self, containers):
+ asset_doc = get_current_project_asset()
+ for container in containers:
+ node = container["node"]
+ camera = get_camera_from_container(node)
+ set_camera_resolution(camera, asset_doc)
diff --git a/openpype/hosts/houdini/plugins/load/load_alembic.py b/openpype/hosts/houdini/plugins/load/load_alembic.py
index c6f0ebf2f9..48bd730ebe 100644
--- a/openpype/hosts/houdini/plugins/load/load_alembic.py
+++ b/openpype/hosts/houdini/plugins/load/load_alembic.py
@@ -20,7 +20,8 @@ class AbcLoader(load.LoaderPlugin):
import hou
# Format file name, Houdini only wants forward slashes
- file_path = os.path.normpath(self.fname)
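+        # Resolve the path from the load context rather than via self.fname
+        # (the same swap is applied across all Houdini loaders in this change)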
+ file_path = self.filepath_from_context(context)
+ file_path = os.path.normpath(file_path)
file_path = file_path.replace("\\", "/")
# Get the root node
diff --git a/openpype/hosts/houdini/plugins/load/load_alembic_archive.py b/openpype/hosts/houdini/plugins/load/load_alembic_archive.py
index 47d2e1b896..3a577f72b4 100644
--- a/openpype/hosts/houdini/plugins/load/load_alembic_archive.py
+++ b/openpype/hosts/houdini/plugins/load/load_alembic_archive.py
@@ -21,7 +21,8 @@ class AbcArchiveLoader(load.LoaderPlugin):
import hou
# Format file name, Houdini only wants forward slashes
- file_path = os.path.normpath(self.fname)
+ file_path = self.filepath_from_context(context)
+ file_path = os.path.normpath(file_path)
file_path = file_path.replace("\\", "/")
# Get the root node
diff --git a/openpype/hosts/houdini/plugins/load/load_bgeo.py b/openpype/hosts/houdini/plugins/load/load_bgeo.py
index 86e8675c02..489bf944ed 100644
--- a/openpype/hosts/houdini/plugins/load/load_bgeo.py
+++ b/openpype/hosts/houdini/plugins/load/load_bgeo.py
@@ -34,7 +34,6 @@ class BgeoLoader(load.LoaderPlugin):
# Create a new geo node
container = obj.createNode("geo", node_name=node_name)
- is_sequence = bool(context["representation"]["context"].get("frame"))
# Remove the file node, it only loads static meshes
# Houdini 17 has removed the file node from the geo node
@@ -43,9 +42,10 @@ class BgeoLoader(load.LoaderPlugin):
file_node.destroy()
# Explicitly create a file node
+ path = self.filepath_from_context(context)
file_node = container.createNode("file", node_name=node_name)
file_node.setParms(
- {"file": self.format_path(self.fname, context["representation"])})
+ {"file": self.format_path(path, context["representation"])})
# Set display on last node
file_node.setDisplayFlag(True)
diff --git a/openpype/hosts/houdini/plugins/load/load_camera.py b/openpype/hosts/houdini/plugins/load/load_camera.py
index 6365508f4e..e16146a267 100644
--- a/openpype/hosts/houdini/plugins/load/load_camera.py
+++ b/openpype/hosts/houdini/plugins/load/load_camera.py
@@ -4,6 +4,13 @@ from openpype.pipeline import (
)
from openpype.hosts.houdini.api import pipeline
+from openpype.hosts.houdini.api.lib import (
+ set_camera_resolution,
+ get_camera_from_container
+)
+
+import hou
+
ARCHIVE_EXPRESSION = ('__import__("_alembic_hom_extensions")'
'.alembicGetCameraDict')
@@ -25,7 +32,15 @@ def transfer_non_default_values(src, dest, ignore=None):
channel expression and ignore certain Parm types.
"""
- import hou
+
+ ignore_types = {
+ hou.parmTemplateType.Toggle,
+ hou.parmTemplateType.Menu,
+ hou.parmTemplateType.Button,
+ hou.parmTemplateType.FolderSet,
+ hou.parmTemplateType.Separator,
+ hou.parmTemplateType.Label,
+ }
src.updateParmStates()
@@ -62,14 +77,6 @@ def transfer_non_default_values(src, dest, ignore=None):
continue
# Ignore folders, separators, etc.
- ignore_types = {
- hou.parmTemplateType.Toggle,
- hou.parmTemplateType.Menu,
- hou.parmTemplateType.Button,
- hou.parmTemplateType.FolderSet,
- hou.parmTemplateType.Separator,
- hou.parmTemplateType.Label,
- }
if parm.parmTemplate().type() in ignore_types:
continue
@@ -90,12 +97,8 @@ class CameraLoader(load.LoaderPlugin):
def load(self, context, name=None, namespace=None, data=None):
- import os
- import hou
-
# Format file name, Houdini only wants forward slashes
- file_path = os.path.normpath(self.fname)
- file_path = file_path.replace("\\", "/")
+ file_path = self.filepath_from_context(context).replace("\\", "/")
# Get the root node
obj = hou.node("/obj")
@@ -105,19 +108,21 @@ class CameraLoader(load.LoaderPlugin):
node_name = "{}_{}".format(namespace, name) if namespace else name
# Create a archive node
- container = self.create_and_connect(obj, "alembicarchive", node_name)
+ node = self.create_and_connect(obj, "alembicarchive", node_name)
# TODO: add FPS of project / asset
- container.setParms({"fileName": file_path,
- "channelRef": True})
+ node.setParms({"fileName": file_path, "channelRef": True})
# Apply some magic
- container.parm("buildHierarchy").pressButton()
- container.moveToGoodPosition()
+ node.parm("buildHierarchy").pressButton()
+ node.moveToGoodPosition()
# Create an alembic xform node
- nodes = [container]
+ nodes = [node]
+ camera = get_camera_from_container(node)
+ self._match_maya_render_mask(camera)
+ set_camera_resolution(camera, asset_doc=context["asset"])
self[:] = nodes
return pipeline.containerise(node_name,
@@ -142,14 +147,14 @@ class CameraLoader(load.LoaderPlugin):
# Store the cam temporarily next to the Alembic Archive
# so that we can preserve parm values the user set on it
# after build hierarchy was triggered.
- old_camera = self._get_camera(node)
+ old_camera = get_camera_from_container(node)
temp_camera = old_camera.copyTo(node.parent())
# Rebuild
node.parm("buildHierarchy").pressButton()
# Apply values to the new camera
- new_camera = self._get_camera(node)
+ new_camera = get_camera_from_container(node)
transfer_non_default_values(temp_camera,
new_camera,
# The hidden uniform scale attribute
@@ -157,6 +162,9 @@ class CameraLoader(load.LoaderPlugin):
# "icon_scale" just skip that completely
ignore={"scale"})
+ self._match_maya_render_mask(new_camera)
+ set_camera_resolution(new_camera)
+
temp_camera.destroy()
def remove(self, container):
@@ -164,15 +172,6 @@ class CameraLoader(load.LoaderPlugin):
node = container["node"]
node.destroy()
- def _get_camera(self, node):
- import hou
- cameras = node.recursiveGlob("*",
- filter=hou.nodeTypeFilter.ObjCamera,
- include_subnets=False)
-
- assert len(cameras) == 1, "Camera instance must have only one camera"
- return cameras[0]
-
def create_and_connect(self, node, node_type, name=None):
"""Create a node within a node which and connect it to the input
@@ -193,5 +192,20 @@ class CameraLoader(load.LoaderPlugin):
new_node.moveToGoodPosition()
return new_node
- def switch(self, container, representation):
- self.update(container, representation)
+ def _match_maya_render_mask(self, camera):
+ """Workaround to match Maya render mask in Houdini"""
+
+        # Rewrite the expression so the aperture is computed into a variable
+        # that the appended resolution-mask logic below can adjust
+ parm = camera.parm("aperture")
+ expression = parm.expression()
+ expression = expression.replace("return ", "aperture = ")
+ expression += """
+# Match maya render mask (logic from Houdini's own FBX importer)
+node = hou.pwd()
+resx = node.evalParm('resx')
+resy = node.evalParm('resy')
+aspect = node.evalParm('aspect')
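+# Shrink the aperture only when the image aspect (resx/resy * pixel aspect)
+# falls below the assumed 1.5 film aspect; min(1, ...) never enlarges it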
+aperture *= min(1, (resx / resy * aspect) / 1.5)
+return aperture
+"""
+ parm.setExpression(expression, language=hou.exprLanguage.Python)
diff --git a/openpype/hosts/houdini/plugins/load/load_fbx.py b/openpype/hosts/houdini/plugins/load/load_fbx.py
new file mode 100644
index 0000000000..cac22d62d4
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/load/load_fbx.py
@@ -0,0 +1,139 @@
+# -*- coding: utf-8 -*-
+"""FBX Loader for Houdini."""
+from openpype.pipeline import (
+ load,
+ get_representation_path,
+)
+from openpype.hosts.houdini.api import pipeline
+
+
+class FbxLoader(load.LoaderPlugin):
+ """Load fbx files. """
+
+ label = "Load FBX"
+ icon = "code-fork"
+ color = "orange"
+
+ order = -10
+
+ families = ["staticMesh", "fbx"]
+ representations = ["fbx"]
+
+ def load(self, context, name=None, namespace=None, data=None):
+
+ # get file path from context
+ file_path = self.filepath_from_context(context)
+ file_path = file_path.replace("\\", "/")
+
+ # get necessary data
+ namespace, node_name = self.get_node_name(context, name, namespace)
+
+ # create load tree
+ nodes = self.create_load_node_tree(file_path, node_name, name)
+
+ self[:] = nodes
+
+        # Call the containerise function, which automates some tasks for you,
+        # like moving the created nodes to the AVALON_CONTAINERS subnetwork
+ containerised_nodes = pipeline.containerise(
+ node_name,
+ namespace,
+ nodes,
+ context,
+ self.__class__.__name__,
+ suffix="",
+ )
+
+ return containerised_nodes
+
+ def update(self, container, representation):
+
+ node = container["node"]
+ try:
+ file_node = next(
+ n for n in node.children() if n.type().name() == "file"
+ )
+ except StopIteration:
+ self.log.error("Could not find node of type `file`")
+ return
+
+ # Update the file path from representation
+ file_path = get_representation_path(representation)
+ file_path = file_path.replace("\\", "/")
+
+ file_node.setParms({"file": file_path})
+
+ # Update attribute
+ node.setParms({"representation": str(representation["_id"])})
+
+ def remove(self, container):
+
+ node = container["node"]
+ node.destroy()
+
+ def switch(self, container, representation):
+ self.update(container, representation)
+
+ def get_node_name(self, context, name=None, namespace=None):
+ """Define node name."""
+
+ if not namespace:
+ namespace = context["asset"]["name"]
+
+ if namespace:
+ node_name = "{}_{}".format(namespace, name)
+ else:
+ node_name = name
+
+ return namespace, node_name
+
+ def create_load_node_tree(self, file_path, node_name, subset_name):
+ """Create Load network.
+
+        You can start building your tree at any obj level,
+        although it is much easier to build it at the root obj level.
+
+        Afterwards, the tree is automatically moved to the
+        '/obj/AVALON_CONTAINERS' subnetwork.
+ """
+ import hou
+
+ # Get the root obj level
+ obj = hou.node("/obj")
+
+ # Create a new obj geo node
+ parent_node = obj.createNode("geo", node_name=node_name)
+
+        # In older Houdini versions, creating a new obj geo node
+        # automatically creates a default file node inside it,
+        # so delete it if it exists.
+ file_node = parent_node.node("file1")
+ if file_node:
+ file_node.destroy()
+
+ # Create a new file node
+ file_node = parent_node.createNode("file", node_name=node_name)
+ file_node.setParms({"file": file_path})
+
+ # Create attribute delete
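+        # (presumably to strip the fbx_* point attributes added on import,
+        # so they don't leak into the published geometry)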
+ attribdelete_name = "attribdelete_{}".format(subset_name)
+ attribdelete = parent_node.createNode("attribdelete",
+ node_name=attribdelete_name)
+ attribdelete.setParms({"ptdel": "fbx_*"})
+ attribdelete.setInput(0, file_node)
+
+ # Create a Null node
+ null_name = "OUT_{}".format(subset_name)
+ null = parent_node.createNode("null", node_name=null_name)
+ null.setInput(0, attribdelete)
+
+        # Keep the display flag on the file node rather than on the OUT
+        # node to optimize "debug" display in the viewport.
+ file_node.setDisplayFlag(True)
+
+ # Set new position for children nodes
+ parent_node.layoutChildren()
+
+ # Return all the nodes
+ return [parent_node, file_node, attribdelete, null]
diff --git a/openpype/hosts/houdini/plugins/load/load_hda.py b/openpype/hosts/houdini/plugins/load/load_hda.py
index 2438570c6e..9630716253 100644
--- a/openpype/hosts/houdini/plugins/load/load_hda.py
+++ b/openpype/hosts/houdini/plugins/load/load_hda.py
@@ -21,7 +21,8 @@ class HdaLoader(load.LoaderPlugin):
import hou
# Format file name, Houdini only wants forward slashes
- file_path = os.path.normpath(self.fname)
+ file_path = self.filepath_from_context(context)
+ file_path = os.path.normpath(file_path)
file_path = file_path.replace("\\", "/")
# Get the root node
@@ -58,6 +59,9 @@ class HdaLoader(load.LoaderPlugin):
def_paths = [d.libraryFilePath() for d in defs]
new = def_paths.index(file_path)
defs[new].setIsPreferred(True)
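+        # Keep the container attribute in sync with the new representation id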
+ hda_node.setParms({
+ "representation": str(representation["_id"])
+ })
def remove(self, container):
node = container["node"]
diff --git a/openpype/hosts/houdini/plugins/load/load_image.py b/openpype/hosts/houdini/plugins/load/load_image.py
index 26bc569c53..663a93e48b 100644
--- a/openpype/hosts/houdini/plugins/load/load_image.py
+++ b/openpype/hosts/houdini/plugins/load/load_image.py
@@ -55,7 +55,8 @@ class ImageLoader(load.LoaderPlugin):
def load(self, context, name=None, namespace=None, data=None):
# Format file name, Houdini only wants forward slashes
- file_path = os.path.normpath(self.fname)
+ file_path = self.filepath_from_context(context)
+ file_path = os.path.normpath(file_path)
file_path = file_path.replace("\\", "/")
file_path = self._get_file_sequence(file_path)
diff --git a/openpype/hosts/houdini/plugins/load/load_usd_layer.py b/openpype/hosts/houdini/plugins/load/load_usd_layer.py
index 1f0ec25128..1528cf549f 100644
--- a/openpype/hosts/houdini/plugins/load/load_usd_layer.py
+++ b/openpype/hosts/houdini/plugins/load/load_usd_layer.py
@@ -26,7 +26,8 @@ class USDSublayerLoader(load.LoaderPlugin):
import hou
# Format file name, Houdini only wants forward slashes
- file_path = os.path.normpath(self.fname)
+ file_path = self.filepath_from_context(context)
+ file_path = os.path.normpath(file_path)
file_path = file_path.replace("\\", "/")
# Get the root node
diff --git a/openpype/hosts/houdini/plugins/load/load_usd_reference.py b/openpype/hosts/houdini/plugins/load/load_usd_reference.py
index f66d05395e..8402ad072c 100644
--- a/openpype/hosts/houdini/plugins/load/load_usd_reference.py
+++ b/openpype/hosts/houdini/plugins/load/load_usd_reference.py
@@ -26,7 +26,8 @@ class USDReferenceLoader(load.LoaderPlugin):
import hou
# Format file name, Houdini only wants forward slashes
- file_path = os.path.normpath(self.fname)
+ file_path = self.filepath_from_context(context)
+ file_path = os.path.normpath(file_path)
file_path = file_path.replace("\\", "/")
# Get the root node
diff --git a/openpype/hosts/houdini/plugins/load/load_vdb.py b/openpype/hosts/houdini/plugins/load/load_vdb.py
index 87900502c5..bcc4f200d3 100644
--- a/openpype/hosts/houdini/plugins/load/load_vdb.py
+++ b/openpype/hosts/houdini/plugins/load/load_vdb.py
@@ -40,8 +40,9 @@ class VdbLoader(load.LoaderPlugin):
# Explicitly create a file node
file_node = container.createNode("file", node_name=node_name)
+ path = self.filepath_from_context(context)
file_node.setParms(
- {"file": self.format_path(self.fname, context["representation"])})
+ {"file": self.format_path(path, context["representation"])})
# Set display on last node
file_node.setDisplayFlag(True)
diff --git a/openpype/hosts/houdini/plugins/load/show_usdview.py b/openpype/hosts/houdini/plugins/load/show_usdview.py
index 2737bc40fa..7b03a0738a 100644
--- a/openpype/hosts/houdini/plugins/load/show_usdview.py
+++ b/openpype/hosts/houdini/plugins/load/show_usdview.py
@@ -20,7 +20,8 @@ class ShowInUsdview(load.LoaderPlugin):
usdview = find_executable("usdview")
- filepath = os.path.normpath(self.fname)
+ filepath = self.filepath_from_context(context)
+ filepath = os.path.normpath(filepath)
filepath = filepath.replace("\\", "/")
if not os.path.exists(filepath):
diff --git a/openpype/hosts/houdini/plugins/publish/collect_arnold_rop.py b/openpype/hosts/houdini/plugins/publish/collect_arnold_rop.py
index 614785487f..43b8428c60 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_arnold_rop.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_arnold_rop.py
@@ -50,7 +50,7 @@ class CollectArnoldROPRenderProducts(pyblish.api.InstancePlugin):
num_aovs = rop.evalParm("ar_aovs")
for index in range(1, num_aovs + 1):
# Skip disabled AOVs
- if not rop.evalParm("ar_enable_aovP{}".format(index)):
+ if not rop.evalParm("ar_enable_aov{}".format(index)):
continue
if rop.evalParm("ar_aov_exr_enable_layer_name{}".format(index)):
diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py
index 91a3d9d170..01df809d4c 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_frames.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py
@@ -13,7 +13,8 @@ class CollectFrames(pyblish.api.InstancePlugin):
order = pyblish.api.CollectorOrder + 0.01
label = "Collect Frames"
- families = ["vdbcache", "imagesequence", "ass", "redshiftproxy", "review"]
+ families = ["vdbcache", "imagesequence", "ass",
+ "redshiftproxy", "review", "bgeo"]
def process(self, instance):
@@ -32,9 +33,9 @@ class CollectFrames(pyblish.api.InstancePlugin):
output = output_parm.eval()
_, ext = lib.splitext(
- output,
- allowed_multidot_extensions=[".ass.gz"]
- )
+ output, allowed_multidot_extensions=[
+ ".ass.gz", ".bgeo.sc", ".bgeo.gz",
+ ".bgeo.lzma", ".bgeo.bz2"])
file_name = os.path.basename(output)
result = file_name
@@ -76,7 +77,7 @@ class CollectFrames(pyblish.api.InstancePlugin):
frame = match.group(1)
padding = len(frame)
- # Get the parts of the filename surrounding the frame number
+ # Get the parts of the filename surrounding the frame number,
# so we can put our own frame numbers in.
span = match.span(1)
prefix = match.string[: span[0]]
diff --git a/openpype/hosts/houdini/plugins/publish/collect_output_node.py b/openpype/hosts/houdini/plugins/publish/collect_output_node.py
index 601ed17b39..bca3d9fdc1 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_output_node.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_output_node.py
@@ -1,5 +1,7 @@
import pyblish.api
+from openpype.pipeline.publish import KnownPublishError
+
class CollectOutputSOPPath(pyblish.api.InstancePlugin):
"""Collect the out node's SOP/COP Path value."""
@@ -12,7 +14,8 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin):
"imagesequence",
"usd",
"usdrender",
- "redshiftproxy"
+ "redshiftproxy",
+ "staticMesh"
]
hosts = ["houdini"]
@@ -57,9 +60,13 @@ class CollectOutputSOPPath(pyblish.api.InstancePlugin):
elif node_type == "Redshift_Proxy_Output":
out_node = node.parm("RS_archive_sopPath").evalAsNode()
+
+ elif node_type == "filmboxfbx":
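+            # The Filmbox FBX ROP stores its source SOP path on "startnode"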
+ out_node = node.parm("startnode").evalAsNode()
+
else:
- raise ValueError(
- "ROP node type '%s' is" " not supported." % node_type
+ raise KnownPublishError(
+ "ROP node type '{}' is not supported.".format(node_type)
)
if not out_node:
diff --git a/openpype/hosts/houdini/plugins/publish/collect_pointcache_type.py b/openpype/hosts/houdini/plugins/publish/collect_pointcache_type.py
new file mode 100644
index 0000000000..3323e97c20
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/publish/collect_pointcache_type.py
@@ -0,0 +1,21 @@
+"""Collector for pointcache types.
+
+This will add additional family to pointcache instance based on
+the creator_identifier parameter.
+"""
+import pyblish.api
+
+
+class CollectPointcacheType(pyblish.api.InstancePlugin):
+ """Collect data type for pointcache instance."""
+
+ order = pyblish.api.CollectorOrder
+ hosts = ["houdini"]
+ families = ["pointcache"]
+ label = "Collect type of pointcache"
+
+ def process(self, instance):
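+        # The extra family routes the instance to the matching extractor:
+        # "abc" -> ExtractAlembic, "bgeo" -> ExtractBGEO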
+ if instance.data["creator_identifier"] == "io.openpype.creators.houdini.bgeo": # noqa: E501
+ instance.data["families"] += ["bgeo"]
+ elif instance.data["creator_identifier"] == "io.openpype.creators.houdini.pointcache": # noqa: E501
+ instance.data["families"] += ["abc"]
diff --git a/openpype/hosts/houdini/plugins/publish/collect_staticmesh_type.py b/openpype/hosts/houdini/plugins/publish/collect_staticmesh_type.py
new file mode 100644
index 0000000000..db9efec7a1
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/publish/collect_staticmesh_type.py
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+"""Collector for staticMesh types. """
+
+import pyblish.api
+
+
+class CollectStaticMeshType(pyblish.api.InstancePlugin):
+ """Collect data type for fbx instance."""
+
+ hosts = ["houdini"]
+ families = ["staticMesh"]
+ label = "Collect type of staticMesh"
+
+ order = pyblish.api.CollectorOrder
+
+ def process(self, instance):
+
+ if instance.data["creator_identifier"] == "io.openpype.creators.houdini.staticmesh.fbx": # noqa: E501
+ # Marking this instance as FBX triggers the FBX extractor.
+ instance.data["families"] += ["fbx"]
diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py
index 81274c670e..14a8e3c056 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py
@@ -1,7 +1,6 @@
import pyblish.api
from openpype.client import get_subset_by_name, get_asset_by_name
-from openpype.pipeline import legacy_io
import openpype.lib.usdlib as usdlib
@@ -51,7 +50,7 @@ class CollectUsdBootstrap(pyblish.api.InstancePlugin):
self.log.debug("Add bootstrap for: %s" % bootstrap)
- project_name = legacy_io.active_project()
+ project_name = instance.context.data["projectName"]
asset = get_asset_by_name(project_name, instance.data["asset"])
assert asset, "Asset must exist: %s" % asset
diff --git a/openpype/hosts/houdini/plugins/publish/collect_vray_rop.py b/openpype/hosts/houdini/plugins/publish/collect_vray_rop.py
index d4fe37f993..277f922ba4 100644
--- a/openpype/hosts/houdini/plugins/publish/collect_vray_rop.py
+++ b/openpype/hosts/houdini/plugins/publish/collect_vray_rop.py
@@ -80,14 +80,9 @@ class CollectVrayROPRenderProducts(pyblish.api.InstancePlugin):
def get_beauty_render_product(self, prefix, suffix=""):
"""Return the beauty output filename if render element enabled
"""
+ # Remove aov suffix from the product: `prefix.aov_suffix` -> `prefix`
aov_parm = ".{}".format(suffix)
- beauty_product = None
- if aov_parm in prefix:
- beauty_product = prefix.replace(aov_parm, "")
- else:
- beauty_product = prefix
-
- return beauty_product
+ return prefix.replace(aov_parm, "")
def get_render_element_name(self, node, prefix, suffix=""):
"""Return the output filename using the AOV prefix and suffix
diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py
index cb2d4ef424..bdd19b23d4 100644
--- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py
+++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py
@@ -13,7 +13,7 @@ class ExtractAlembic(publish.Extractor):
order = pyblish.api.ExtractorOrder
label = "Extract Alembic"
hosts = ["houdini"]
- families = ["pointcache", "camera"]
+ families = ["abc", "camera"]
def process(self, instance):
diff --git a/openpype/hosts/houdini/plugins/publish/extract_bgeo.py b/openpype/hosts/houdini/plugins/publish/extract_bgeo.py
new file mode 100644
index 0000000000..c9625ec880
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/publish/extract_bgeo.py
@@ -0,0 +1,53 @@
+import os
+
+import pyblish.api
+
+from openpype.pipeline import publish
+from openpype.hosts.houdini.api.lib import render_rop
+from openpype.hosts.houdini.api import lib
+
+import hou
+
+
+class ExtractBGEO(publish.Extractor):
+
+ order = pyblish.api.ExtractorOrder
+ label = "Extract BGEO"
+ hosts = ["houdini"]
+ families = ["bgeo"]
+
+ def process(self, instance):
+
+ ropnode = hou.node(instance.data["instance_node"])
+
+ # Get the filename from the filename parameter
+ output = ropnode.evalParm("sopoutput")
+ staging_dir, file_name = os.path.split(output)
+ instance.data["stagingDir"] = staging_dir
+
+ # We run the render
+ self.log.info("Writing bgeo files '{}' to '{}'.".format(
+ file_name, staging_dir))
+
+ # write files
+ render_rop(ropnode)
+
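+        # "frames" was filled in by the CollectFrames collector,
+        # which now also handles the bgeo family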
+ output = instance.data["frames"]
+
+ _, ext = lib.splitext(
+ output[0], allowed_multidot_extensions=[
+ ".ass.gz", ".bgeo.sc", ".bgeo.gz",
+ ".bgeo.lzma", ".bgeo.bz2"])
+
+ if "representations" not in instance.data:
+ instance.data["representations"] = []
+
+ representation = {
+ "name": "bgeo",
+ "ext": ext.lstrip("."),
+ "files": output,
+ "stagingDir": staging_dir,
+ "frameStart": instance.data["frameStart"],
+ "frameEnd": instance.data["frameEnd"]
+ }
+ instance.data["representations"].append(representation)
diff --git a/openpype/hosts/houdini/plugins/publish/extract_fbx.py b/openpype/hosts/houdini/plugins/publish/extract_fbx.py
new file mode 100644
index 0000000000..7993b3352f
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/publish/extract_fbx.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+"""FBX Extractor for Houdini."""
+
+import os
+import pyblish.api
+from openpype.pipeline import publish
+from openpype.hosts.houdini.api.lib import render_rop
+
+import hou
+
+
+class ExtractFBX(publish.Extractor):
+
+ label = "Extract FBX"
+ families = ["fbx"]
+ hosts = ["houdini"]
+
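+    # The +0.1 offset runs this extractor after the default extractors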
+ order = pyblish.api.ExtractorOrder + 0.1
+
+ def process(self, instance):
+
+ # get rop node
+ ropnode = hou.node(instance.data.get("instance_node"))
+ output_file = ropnode.evalParm("sopoutput")
+
+ # get staging_dir and file_name
+ staging_dir = os.path.normpath(os.path.dirname(output_file))
+ file_name = os.path.basename(output_file)
+
+ # render rop
+ self.log.debug("Writing FBX '%s' to '%s'", file_name, staging_dir)
+ render_rop(ropnode)
+
+ # prepare representation
+ representation = {
+ "name": "fbx",
+ "ext": "fbx",
+ "files": file_name,
+ "stagingDir": staging_dir
+ }
+
+ # A single frame may also be rendered without start/end frame.
+ if "frameStart" in instance.data and "frameEnd" in instance.data:
+ representation["frameStart"] = instance.data["frameStart"]
+ representation["frameEnd"] = instance.data["frameEnd"]
+
+ # set value type for 'representations' key to list
+ if "representations" not in instance.data:
+ instance.data["representations"] = []
+
+ # update instance data
+ instance.data["stagingDir"] = staging_dir
+ instance.data["representations"].append(representation)
diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py
index 8422a3bc3e..d6193f13c1 100644
--- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py
+++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py
@@ -14,7 +14,6 @@ from openpype.client import (
)
from openpype.pipeline import (
get_representation_path,
- legacy_io,
publish,
)
import openpype.hosts.houdini.api.usd as hou_usdlib
@@ -250,7 +249,7 @@ class ExtractUSDLayered(publish.Extractor):
# Set up the dependency for publish if they have new content
# compared to previous publishes
- project_name = legacy_io.active_project()
+ project_name = instance.context.data["projectName"]
for dependency in active_dependencies:
dependency_fname = dependency.data["usdFilename"]
diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py
index 2493b28bc1..3569de7693 100644
--- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py
+++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py
@@ -2,7 +2,7 @@ import pyblish.api
from openpype.lib import version_up
from openpype.pipeline import registered_host
-from openpype.action import get_errored_plugins_from_data
+from openpype.pipeline.publish import get_errored_plugins_from_context
from openpype.hosts.houdini.api import HoudiniHost
from openpype.pipeline.publish import KnownPublishError
@@ -27,7 +27,7 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin):
def process(self, context):
- errored_plugins = get_errored_plugins_from_data(context)
+ errored_plugins = get_errored_plugins_from_context(context)
if any(
plugin.__name__ == "HoudiniSubmitPublishDeadline"
for plugin in errored_plugins
@@ -40,9 +40,10 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin):
# Filename must not have changed since collecting
host = registered_host() # type: HoudiniHost
current_file = host.current_file()
- assert (
- context.data["currentFile"] == current_file
- ), "Collected filename mismatches from current scene name."
+ if context.data["currentFile"] != current_file:
+ raise KnownPublishError(
+ "Collected filename mismatches from current scene name."
+ )
new_filepath = version_up(current_file)
host.save_workfile(new_filepath)
diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py
index bef8db45a4..af9e080466 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py
@@ -17,7 +17,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin):
"""
order = pyblish.api.ValidatorOrder + 0.1
- families = ["pointcache"]
+ families = ["abc"]
hosts = ["houdini"]
label = "Validate Primitive to Detail (Abc)"
diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py
index 44d58cfa36..40114bc40e 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py
@@ -18,7 +18,7 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin):
"""
order = pyblish.api.ValidatorOrder + 0.1
- families = ["pointcache"]
+ families = ["abc"]
hosts = ["houdini"]
label = "Validate Alembic ROP Face Sets"
diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py
index b0cf4cdc58..47c47e4ea2 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py
@@ -14,7 +14,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin):
"""
order = pyblish.api.ValidatorOrder + 0.1
- families = ["pointcache"]
+ families = ["abc"]
hosts = ["houdini"]
label = "Validate Input Node (Abc)"
diff --git a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py
index 4878738ed3..79387fbef5 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py
@@ -1,5 +1,6 @@
import pyblish.api
+from openpype.pipeline.publish import PublishValidationError
from openpype.hosts.houdini.api import lib
import hou
@@ -30,7 +31,7 @@ class ValidateAnimationSettings(pyblish.api.InstancePlugin):
invalid = self.get_invalid(instance)
if invalid:
- raise RuntimeError(
+ raise PublishValidationError(
"Output settings do no match for '%s'" % instance
)
diff --git a/openpype/hosts/houdini/plugins/publish/validate_fbx_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_fbx_output_node.py
new file mode 100644
index 0000000000..894dad7d72
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/publish/validate_fbx_output_node.py
@@ -0,0 +1,140 @@
+# -*- coding: utf-8 -*-
+import pyblish.api
+from openpype.pipeline import PublishValidationError
+from openpype.hosts.houdini.api.action import (
+ SelectInvalidAction,
+ SelectROPAction,
+)
+from openpype.hosts.houdini.api.lib import get_obj_node_output
+import hou
+
+
+class ValidateFBXOutputNode(pyblish.api.InstancePlugin):
+ """Validate the instance Output Node.
+
+ This will ensure:
+ - The Output Node Path is set.
+ - The Output Node Path refers to an existing object.
+ - The Output Node is a Sop or Obj node.
+ - The Output Node has geometry data.
+ - The Output Node doesn't include invalid primitive types.
+ """
+
+ order = pyblish.api.ValidatorOrder
+ families = ["fbx"]
+ hosts = ["houdini"]
+ label = "Validate FBX Output Node"
+ actions = [SelectROPAction, SelectInvalidAction]
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ nodes = [n.path() for n in invalid]
+ raise PublishValidationError(
+ "See log for details. "
+ "Invalid nodes: {0}".format(nodes),
+ title="Invalid output node(s)"
+ )
+
+ @classmethod
+ def get_invalid(cls, instance):
+ output_node = instance.data.get("output_node")
+
+ # Check if The Output Node Path is set and
+ # refers to an existing object.
+ if output_node is None:
+ rop_node = hou.node(instance.data["instance_node"])
+ cls.log.error(
+ "Output node in '%s' does not exist. "
+ "Ensure a valid output path is set.", rop_node.path()
+ )
+
+ return [rop_node]
+
+ # Check if the Output Node is a Sop or an Obj node
+ # also, list all sop output nodes inside as well as
+ # invalid empty nodes.
+ all_out_sops = []
+ invalid = []
+
+ # if output_node is an ObjSubnet or an ObjNetwork
+ if output_node.childTypeCategory() == hou.objNodeTypeCategory():
+ for node in output_node.allSubChildren():
+ if node.type().name() == "geo":
+ out = get_obj_node_output(node)
+ if out:
+ all_out_sops.append(out)
+ else:
+ invalid.append(node) # empty_objs
+ cls.log.error(
+ "Geo Obj Node '%s' is empty!",
+ node.path()
+ )
+ if not all_out_sops:
+ invalid.append(output_node) # empty_objs
+ cls.log.error(
+ "Output Node '%s' is empty!",
+                    output_node.path()
+ )
+
+ # elif output_node is an ObjNode
+ elif output_node.type().name() == "geo":
+ out = get_obj_node_output(output_node)
+ if out:
+ all_out_sops.append(out)
+ else:
+                invalid.append(output_node)  # empty_objs
+                cls.log.error(
+                    "Output Node '%s' is empty!",
+                    output_node.path()
+                )
+
+ # elif output_node is a SopNode
+ elif output_node.type().category().name() == "Sop":
+ all_out_sops.append(output_node)
+
+ # Then it's a wrong node type
+ else:
+ cls.log.error(
+                "Output node %s is not a SOP, OBJ Geo, or OBJ SubNet node. "
+ "Instead found category type: %s %s",
+ output_node.path(), output_node.type().category().name(),
+ output_node.type().name()
+ )
+ return [output_node]
+
+ # Check if all output sop nodes have geometry
+ # and don't contain invalid prims
+ invalid_prim_types = ["VDB", "Volume"]
+ for sop_node in all_out_sops:
+ # Empty Geometry test
+ if not hasattr(sop_node, "geometry"):
+ invalid.append(sop_node) # empty_geometry
+ cls.log.error(
+ "Sop node '%s' doesn't include any prims.",
+ sop_node.path()
+ )
+ continue
+
+ frame = instance.data.get("frameStart", 0)
+ geo = sop_node.geometryAtFrame(frame)
+ if len(geo.iterPrims()) == 0:
+ invalid.append(sop_node) # empty_geometry
+ cls.log.error(
+ "Sop node '%s' doesn't include any prims.",
+ sop_node.path()
+ )
+ continue
+
+ # Invalid Prims test
+ for prim_type in invalid_prim_types:
+ if geo.countPrimType(prim_type) > 0:
+ invalid.append(sop_node) # invalid_prims
+ cls.log.error(
+ "Sop node '%s' includes invalid prims of type '%s'.",
+ sop_node.path(), prim_type
+ )
+
+ if invalid:
+ return invalid
diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py
index 4584e78f4f..6594d10851 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py
@@ -19,12 +19,11 @@ class ValidateFileExtension(pyblish.api.InstancePlugin):
"""
order = pyblish.api.ValidatorOrder
- families = ["pointcache", "camera", "vdbcache"]
+ families = ["camera", "vdbcache"]
hosts = ["houdini"]
label = "Output File Extension"
family_extensions = {
- "pointcache": ".abc",
"camera": ".abc",
"vdbcache": ".vdb",
}
diff --git a/openpype/hosts/houdini/plugins/publish/validate_mesh_is_static.py b/openpype/hosts/houdini/plugins/publish/validate_mesh_is_static.py
new file mode 100644
index 0000000000..b499682e0b
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/publish/validate_mesh_is_static.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+"""Validator checking that a Static Mesh is actually static (not animated)."""
+import pyblish.api
+from openpype.pipeline import (
+ PublishValidationError,
+ OptionalPyblishPluginMixin
+)
+from openpype.pipeline.publish import ValidateContentsOrder
+
+from openpype.hosts.houdini.api.action import SelectInvalidAction
+from openpype.hosts.houdini.api.lib import get_output_children
+
+
+class ValidateMeshIsStatic(pyblish.api.InstancePlugin,
+ OptionalPyblishPluginMixin):
+ """Validate mesh is static.
+
+ It checks if output node is time dependent.
+ """
+
+ families = ["staticMesh"]
+ hosts = ["houdini"]
+ label = "Validate Mesh is Static"
+ order = ValidateContentsOrder + 0.1
+ actions = [SelectInvalidAction]
+
+ def process(self, instance):
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ nodes = [n.path() for n in invalid]
+ raise PublishValidationError(
+ "See log for details. "
+ "Invalid nodes: {0}".format(nodes)
+ )
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ invalid = []
+
+ output_node = instance.data.get("output_node")
+ if output_node is None:
+ cls.log.debug(
+ "No Output Node, skipping check.."
+ )
+ return
+
+ all_outputs = get_output_children(output_node)
+
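+        # isTimeDependent() is True when a node's cook result changes per
+        # frame, i.e. the geometry animates and is not a static mesh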
+ for output in all_outputs:
+ if output.isTimeDependent():
+ invalid.append(output)
+ cls.log.error(
+ "Output node '%s' is time dependent.",
+ output.path()
+ )
+
+ return invalid
diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py
index cd5e724ab3..471fa5b6d1 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py
@@ -1,10 +1,19 @@
# -*- coding: utf-8 -*-
import pyblish.api
-from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline import PublishValidationError
+from openpype.pipeline.publish import (
+ ValidateContentsOrder,
+ RepairAction,
+)
+
import hou
+class AddDefaultPathAction(RepairAction):
+ label = "Add a default path attribute"
+ icon = "mdi.pencil-plus-outline"
+
+
class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
"""Validate all primitives build hierarchy from attribute when enabled.
@@ -15,15 +24,17 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
"""
order = ValidateContentsOrder + 0.1
- families = ["pointcache"]
+ families = ["abc"]
hosts = ["houdini"]
label = "Validate Prims Hierarchy Path"
+ actions = [AddDefaultPathAction]
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
+ nodes = [n.path() for n in invalid]
raise PublishValidationError(
- "See log for details. " "Invalid nodes: {0}".format(invalid),
+ "See log for details. " "Invalid nodes: {0}".format(nodes),
title=self.label
)
@@ -36,10 +47,10 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
if output_node is None:
cls.log.error(
"SOP Output node in '%s' does not exist. "
- "Ensure a valid SOP output path is set." % rop_node.path()
+ "Ensure a valid SOP output path is set.", rop_node.path()
)
- return [rop_node.path()]
+ return [rop_node]
build_from_path = rop_node.parm("build_from_path").eval()
if not build_from_path:
@@ -56,9 +67,9 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
"value set, but 'Build Hierarchy from Attribute'"
"is enabled."
)
- return [rop_node.path()]
+ return [rop_node]
- cls.log.debug("Checking for attribute: %s" % path_attr)
+ cls.log.debug("Checking for attribute: %s", path_attr)
if not hasattr(output_node, "geometry"):
# In the case someone has explicitly set an Object
@@ -89,17 +100,17 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
if not attrib:
cls.log.info(
"Geometry Primitives are missing "
- "path attribute: `%s`" % path_attr
+ "path attribute: `%s`", path_attr
)
- return [output_node.path()]
+ return [output_node]
# Ensure at least a single string value is present
if not attrib.strings():
cls.log.info(
"Primitive path attribute has no "
- "string values: %s" % path_attr
+ "string values: %s", path_attr
)
- return [output_node.path()]
+ return [output_node]
paths = geo.primStringAttribValues(path_attr)
# Ensure all primitives are set to a valid path
@@ -109,6 +120,65 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin):
num_prims = len(geo.iterPrims()) # faster than len(geo.prims())
cls.log.info(
"Prims have no value for attribute `%s` "
- "(%s of %s prims)" % (path_attr, len(invalid_prims), num_prims)
+ "(%s of %s prims)", path_attr, len(invalid_prims), num_prims
+ )
+ return [output_node]
+
+ @classmethod
+ def repair(cls, instance):
+ """Add a default path attribute Action.
+
+ It is a helper action more than a repair action,
+ used to add a default single value for the path.
+ """
+
+ rop_node = hou.node(instance.data["instance_node"])
+ output_node = rop_node.parm("sop_path").evalAsNode()
+
+ if not output_node:
+ cls.log.debug(
+ "Action isn't performed, invalid SOP Path on %s",
+ rop_node
+ )
+ return
+
+        # This check prevents the action from running multiple times;
+        # get_invalid only returns [output_node] when the
+        # path attribute is the problem.
+ if cls.get_invalid(instance) != [output_node]:
+ return
+
+ path_attr = rop_node.parm("path_attrib").eval()
+
+ path_node = output_node.parent().createNode("name", "AUTO_PATH")
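+        # The backtick expression below evaluates to
+        # "<geo node name>/<geo node name>Shape" for every primitive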
+ path_node.parm("attribname").set(path_attr)
+ path_node.parm("name1").set('`opname("..")`/`opname("..")`Shape')
+
+ cls.log.debug(
+ "'%s' was created. It adds '%s' with a default single value",
+ path_node, path_attr
+ )
+
+ path_node.setGenericFlag(hou.nodeFlag.DisplayComment, True)
+ path_node.setComment(
+ 'Auto path node was created automatically by '
+ '"Add a default path attribute"'
+ '\nFeel free to modify or replace it.'
+ )
+
+ if output_node.type().name() in ["null", "output"]:
+ # Connect before
+ path_node.setFirstInput(output_node.input(0))
+ path_node.moveToGoodPosition()
+ output_node.setFirstInput(path_node)
+ output_node.moveToGoodPosition()
+ else:
+ # Connect after
+ path_node.setFirstInput(output_node)
+ rop_node.parm("sop_path").set(path_node.path())
+ path_node.moveToGoodPosition()
+
+ cls.log.debug(
+ "SOP path on '%s' updated to new output node '%s'",
+ rop_node, path_node
)
- return [output_node.path()]
diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py
index 4e8e5fc0e8..4f71d79382 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py
@@ -36,11 +36,11 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin):
if node.parm("shellexec").eval():
self.raise_error("Must not execute in shell")
if node.parm("prerender").eval() != cmd:
- self.raise_error(("REMOTE_PUBLISH node does not have "
- "correct prerender script."))
+ self.raise_error("REMOTE_PUBLISH node does not have "
+ "correct prerender script.")
if node.parm("lprerender").eval() != "python":
- self.raise_error(("REMOTE_PUBLISH node prerender script "
- "type not set to 'python'"))
+ self.raise_error("REMOTE_PUBLISH node prerender script "
+ "type not set to 'python'")
@classmethod
def repair(cls, context):
@@ -48,5 +48,4 @@ class ValidateRemotePublishOutNode(pyblish.api.ContextPlugin):
lib.create_remote_publish_node(force=True)
def raise_error(self, message):
- self.log.error(message)
- raise PublishValidationError(message, title=self.label)
+ raise PublishValidationError(message)
diff --git a/openpype/hosts/houdini/plugins/publish/validate_review_colorspace.py b/openpype/hosts/houdini/plugins/publish/validate_review_colorspace.py
new file mode 100644
index 0000000000..03ecd1b052
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/publish/validate_review_colorspace.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+import pyblish.api
+from openpype.pipeline import (
+ PublishValidationError,
+ OptionalPyblishPluginMixin
+)
+from openpype.pipeline.publish import RepairAction
+from openpype.hosts.houdini.api.action import SelectROPAction
+
+import os
+import hou
+
+
+class SetDefaultViewSpaceAction(RepairAction):
+ label = "Set default view colorspace"
+ icon = "mdi.monitor"
+
+
+class ValidateReviewColorspace(pyblish.api.InstancePlugin,
+ OptionalPyblishPluginMixin):
+ """Validate Review Colorspace parameters.
+
+ It checks if 'OCIO Colorspace' parameter was set to valid value.
+ """
+
+ order = pyblish.api.ValidatorOrder + 0.1
+ families = ["review"]
+ hosts = ["houdini"]
+ label = "Validate Review Colorspace"
+ actions = [SetDefaultViewSpaceAction, SelectROPAction]
+
+ optional = True
+
+ def process(self, instance):
+
+ if not self.is_active(instance.data):
+ return
+
+ if os.getenv("OCIO") is None:
+ self.log.debug(
+                "Using Houdini's Default Color Management, "
+                "skipping check.."
+ )
+ return
+
+ rop_node = hou.node(instance.data["instance_node"])
+ if rop_node.evalParm("colorcorrect") != 2:
+ # any colorspace settings other than default requires
+ # 'Color Correct' parm to be set to 'OpenColorIO'
+ raise PublishValidationError(
+ "'Color Correction' parm on '{}' ROP must be set to"
+ " 'OpenColorIO'".format(rop_node.path())
+ )
+
+ if rop_node.evalParm("ociocolorspace") not in \
+ hou.Color.ocio_spaces():
+
+ raise PublishValidationError(
+ "Invalid value: Colorspace name doesn't exist.\n"
+ "Check 'OCIO Colorspace' parameter on '{}' ROP"
+ .format(rop_node.path())
+ )
+
+ @classmethod
+ def repair(cls, instance):
+ """Set Default View Space Action.
+
+ It is a helper action more than a repair action,
+ used to set colorspace on opengl node to the default view.
+ """
+ from openpype.hosts.houdini.api.colorspace import get_default_display_view_colorspace # noqa
+
+ rop_node = hou.node(instance.data["instance_node"])
+
+ if rop_node.evalParm("colorcorrect") != 2:
+ rop_node.setParms({"colorcorrect": 2})
+ cls.log.debug(
+ "'Color Correction' parm on '{}' has been set to"
+ " 'OpenColorIO'".format(rop_node.path())
+ )
+
+ # Get default view colorspace name
+ default_view_space = get_default_display_view_colorspace()
+
+ rop_node.setParms({"ociocolorspace": default_view_space})
+ cls.log.info(
+ "'OCIO Colorspace' parm on '{}' has been set to "
+ "the default view color space '{}'"
+ .format(rop_node, default_view_space)
+ )
diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py
index ed7f438729..9590e37d26 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py
@@ -1,6 +1,12 @@
# -*- coding: utf-8 -*-
import pyblish.api
from openpype.pipeline import PublishValidationError
+from openpype.hosts.houdini.api.action import (
+ SelectInvalidAction,
+ SelectROPAction,
+)
+
+import hou
class ValidateSopOutputNode(pyblish.api.InstancePlugin):
@@ -18,7 +24,8 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin):
order = pyblish.api.ValidatorOrder
families = ["pointcache", "vdbcache"]
hosts = ["houdini"]
- label = "Validate Output Node"
+ label = "Validate Output Node (SOP)"
+ actions = [SelectROPAction, SelectInvalidAction]
def process(self, instance):
@@ -31,9 +38,6 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin):
@classmethod
def get_invalid(cls, instance):
-
- import hou
-
output_node = instance.data.get("output_node")
if output_node is None:
@@ -43,7 +47,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin):
"Ensure a valid SOP output path is set." % node.path()
)
- return [node.path()]
+ return [node]
# Output node must be a Sop node.
if not isinstance(output_node, hou.SopNode):
@@ -53,7 +57,7 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin):
"instead found category type: %s"
% (output_node.path(), output_node.type().category().name())
)
- return [output_node.path()]
+ return [output_node]
# For the sake of completeness also assert the category type
# is Sop to avoid potential edge case scenarios even though
@@ -73,11 +77,11 @@ class ValidateSopOutputNode(pyblish.api.InstancePlugin):
except hou.Error as exc:
cls.log.error("Cook failed: %s" % exc)
cls.log.error(output_node.errors()[0])
- return [output_node.path()]
+ return [output_node]
# Ensure the output node has at least Geometry data
if not output_node.geometry():
cls.log.error(
"Output node `%s` has no geometry data." % output_node.path()
)
- return [output_node.path()]
+ return [output_node]
diff --git a/openpype/hosts/houdini/plugins/publish/validate_subset_name.py b/openpype/hosts/houdini/plugins/publish/validate_subset_name.py
new file mode 100644
index 0000000000..bb3648f361
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/publish/validate_subset_name.py
@@ -0,0 +1,93 @@
+"""Validator for correct subset names."""
+"""Validator for correct naming of Static Meshes."""
+import pyblish.api
+from openpype.pipeline import (
+ PublishValidationError,
+ OptionalPyblishPluginMixin
+)
+from openpype.pipeline.publish import (
+ ValidateContentsOrder,
+ RepairAction,
+)
+from openpype.hosts.houdini.api.action import SelectInvalidAction
+from openpype.pipeline.create import get_subset_name
+
+import hou
+
+
+class FixSubsetNameAction(RepairAction):
+ label = "Fix Subset Name"
+
+
+class ValidateSubsetName(pyblish.api.InstancePlugin,
+ OptionalPyblishPluginMixin):
+    """Validate subset name."""
+
+ families = ["staticMesh"]
+ hosts = ["houdini"]
+ label = "Validate Subset Name"
+ order = ValidateContentsOrder + 0.1
+ actions = [FixSubsetNameAction, SelectInvalidAction]
+
+ optional = True
+
+ def process(self, instance):
+
+ if not self.is_active(instance.data):
+ return
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ nodes = [n.path() for n in invalid]
+ raise PublishValidationError(
+ "See log for details. "
+ "Invalid nodes: {0}".format(nodes)
+ )
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ invalid = []
+
+ rop_node = hou.node(instance.data["instance_node"])
+
+ # Check subset name
+ subset_name = get_subset_name(
+ family=instance.data["family"],
+ variant=instance.data["variant"],
+ task_name=instance.data["task"],
+ asset_doc=instance.data["assetEntity"],
+ dynamic_data={"asset": instance.data["asset"]}
+ )
+
+ if instance.data.get("subset") != subset_name:
+ invalid.append(rop_node)
+ cls.log.error(
+ "Invalid subset name on rop node '%s' should be '%s'.",
+ rop_node.path(), subset_name
+ )
+
+ return invalid
+
+ @classmethod
+ def repair(cls, instance):
+ rop_node = hou.node(instance.data["instance_node"])
+
+ # Check subset name
+ subset_name = get_subset_name(
+ family=instance.data["family"],
+ variant=instance.data["variant"],
+ task_name=instance.data["task"],
+ asset_doc=instance.data["assetEntity"],
+ dynamic_data={"asset": instance.data["asset"]}
+ )
+
+ instance.data["subset"] = subset_name
+ rop_node.parm("subset").set(subset_name)
+
+ cls.log.debug(
+ "Subset name on rop node '%s' has been set to '%s'.",
+ rop_node.path(), subset_name
+ )
diff --git a/openpype/hosts/houdini/plugins/publish/validate_unreal_staticmesh_naming.py b/openpype/hosts/houdini/plugins/publish/validate_unreal_staticmesh_naming.py
new file mode 100644
index 0000000000..ae3c7e5602
--- /dev/null
+++ b/openpype/hosts/houdini/plugins/publish/validate_unreal_staticmesh_naming.py
@@ -0,0 +1,97 @@
+# -*- coding: utf-8 -*-
+"""Validator for correct naming of Static Meshes."""
+import pyblish.api
+from openpype.pipeline import (
+ PublishValidationError,
+ OptionalPyblishPluginMixin
+)
+from openpype.pipeline.publish import ValidateContentsOrder
+
+from openpype.hosts.houdini.api.action import SelectInvalidAction
+from openpype.hosts.houdini.api.lib import get_output_children
+
+import hou
+
+
+class ValidateUnrealStaticMeshName(pyblish.api.InstancePlugin,
+ OptionalPyblishPluginMixin):
+ """Validate name of Unreal Static Mesh.
+
+ This validator checks if output node name has a collision prefix:
+ - UBX
+ - UCP
+ - USP
+ - UCX
+
+ This validator also checks if subset name is correct
+ - {static mesh prefix}_{Asset-Name}{Variant}.
+
+ """
+
+ families = ["staticMesh"]
+ hosts = ["houdini"]
+ label = "Unreal Static Mesh Name (FBX)"
+ order = ValidateContentsOrder + 0.1
+ actions = [SelectInvalidAction]
+
+ optional = True
+ collision_prefixes = []
+ static_mesh_prefix = ""
+
+ @classmethod
+ def apply_settings(cls, project_settings, system_settings):
+
+ settings = (
+ project_settings["houdini"]["create"]["CreateStaticMesh"]
+ )
+ cls.collision_prefixes = settings["collision_prefixes"]
+ cls.static_mesh_prefix = settings["static_mesh_prefix"]
+
+ def process(self, instance):
+
+ if not self.is_active(instance.data):
+ return
+
+ invalid = self.get_invalid(instance)
+ if invalid:
+ nodes = [n.path() for n in invalid]
+ raise PublishValidationError(
+ "See log for details. "
+ "Invalid nodes: {0}".format(nodes)
+ )
+
+ @classmethod
+ def get_invalid(cls, instance):
+
+ invalid = []
+
+ rop_node = hou.node(instance.data["instance_node"])
+ output_node = instance.data.get("output_node")
+ if output_node is None:
+ cls.log.debug(
+ "No Output Node, skipping check.."
+ )
+ return
+
+ if rop_node.evalParm("buildfrompath"):
+            # This validator doesn't support the naming check when
+            # 'Build Hierarchy from Path Attribute' is used
+ cls.log.info(
+ "Using 'Build Hierarchy from Path Attribute', skipping check.."
+ )
+ return
+
+ # Check nodes names
+ all_outputs = get_output_children(output_node, include_sops=False)
+ for output in all_outputs:
+ for prefix in cls.collision_prefixes:
+ if output.name().startswith(prefix):
+ invalid.append(output)
+ cls.log.error(
+ "Invalid node name: Node '%s' "
+ "includes a collision prefix '%s'",
+ output.path(), prefix
+ )
+ break
+
+ return invalid
diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py
index 02c44ab94e..1daa96f2b9 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py
@@ -24,7 +24,7 @@ class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin):
if not os.path.isabs(filepath):
invalid.append(
- "Output file path is not " "absolute path: %s" % filepath
+ "Output file path is not absolute path: %s" % filepath
)
if invalid:
diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py
index c4f118ac3b..0db782d545 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py
@@ -4,7 +4,6 @@ import re
import pyblish.api
from openpype.client import get_subset_by_name
-from openpype.pipeline import legacy_io
from openpype.pipeline.publish import ValidateContentsOrder
from openpype.pipeline import PublishValidationError
@@ -18,7 +17,7 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin):
label = "USD Shade model exists"
def process(self, instance):
- project_name = legacy_io.active_project()
+ project_name = instance.context.data["projectName"]
asset_name = instance.data["asset"]
subset = instance.data["subset"]
diff --git a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py
index 543c8e1407..afe05e3173 100644
--- a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py
+++ b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py
@@ -7,8 +7,6 @@ from openpype.pipeline import (
)
from openpype.pipeline.publish import RepairAction
-from openpype.pipeline.publish import RepairAction
-
class ValidateWorkfilePaths(
pyblish.api.InstancePlugin, OptionalPyblishPluginMixin):
diff --git a/openpype/hosts/houdini/startup/MainMenuCommon.xml b/openpype/hosts/houdini/startup/MainMenuCommon.xml
index 47a4653d5d..b2e32a70f9 100644
--- a/openpype/hosts/houdini/startup/MainMenuCommon.xml
+++ b/openpype/hosts/houdini/startup/MainMenuCommon.xml
@@ -2,7 +2,19 @@